2018-06-28 08:44:43 +00:00
|
|
|
#include <Limelight.h>
|
2018-07-18 03:00:16 +00:00
|
|
|
#include "ffmpeg.h"
|
2018-09-08 22:27:21 +00:00
|
|
|
#include "streaming/streamutils.h"
|
2019-01-20 07:05:56 +00:00
|
|
|
#include "streaming/session.h"
|
|
|
|
|
2018-10-13 00:59:53 +00:00
|
|
|
#include <h264_stream.h>
|
2018-07-08 04:52:20 +00:00
|
|
|
|
2019-02-13 03:58:36 +00:00
|
|
|
#include "ffmpeg-renderers/sdlvid.h"
|
|
|
|
|
2018-07-22 00:00:09 +00:00
|
|
|
#ifdef Q_OS_WIN32
|
2018-07-18 03:00:16 +00:00
|
|
|
#include "ffmpeg-renderers/dxva2.h"
|
2018-07-13 09:28:10 +00:00
|
|
|
#endif
|
|
|
|
|
2018-07-22 00:00:09 +00:00
|
|
|
#ifdef Q_OS_DARWIN
|
2018-07-18 03:00:16 +00:00
|
|
|
#include "ffmpeg-renderers/vt.h"
|
2018-07-22 00:00:09 +00:00
|
|
|
#endif
|
|
|
|
|
|
|
|
#ifdef HAVE_LIBVA
|
2018-07-21 07:16:03 +00:00
|
|
|
#include "ffmpeg-renderers/vaapi.h"
|
|
|
|
#endif
|
|
|
|
|
2018-08-03 09:11:44 +00:00
|
|
|
#ifdef HAVE_LIBVDPAU
|
|
|
|
#include "ffmpeg-renderers/vdpau.h"
|
|
|
|
#endif
|
|
|
|
|
2019-04-16 08:20:21 +00:00
|
|
|
#ifdef HAVE_MMAL
|
|
|
|
#include "ffmpeg-renderers/mmal.h"
|
|
|
|
#endif
|
|
|
|
|
2019-04-21 05:22:37 +00:00
|
|
|
#ifdef HAVE_DRM
|
|
|
|
#include "ffmpeg-renderers/drm.h"
|
|
|
|
#endif
|
|
|
|
|
2020-04-13 08:40:28 +00:00
|
|
|
#ifdef HAVE_EGL
|
|
|
|
#include "ffmpeg-renderers/eglvid.h"
|
|
|
|
#endif
|
|
|
|
|
2021-12-07 00:22:39 +00:00
|
|
|
#ifdef HAVE_CUDA
|
|
|
|
#include "ffmpeg-renderers/cuda.h"
|
|
|
|
#endif
|
|
|
|
|
2018-08-03 04:37:46 +00:00
|
|
|
// This is gross but it allows us to use sizeof()
|
|
|
|
#include "ffmpeg_videosamples.cpp"
|
2018-07-13 09:28:10 +00:00
|
|
|
|
2018-10-13 00:59:53 +00:00
|
|
|
#define MAX_SPS_EXTRA_SIZE 16
|
|
|
|
|
2018-08-10 01:39:38 +00:00
|
|
|
#define FAILED_DECODES_RESET_THRESHOLD 20
|
|
|
|
|
2021-12-15 02:41:27 +00:00
|
|
|
#define MAX_RECV_FRAME_RETRIES 100
|
|
|
|
|
2018-08-03 04:37:46 +00:00
|
|
|
bool FFmpegVideoDecoder::isHardwareAccelerated()
|
|
|
|
{
|
2019-04-10 04:50:22 +00:00
|
|
|
return m_HwDecodeCfg != nullptr ||
|
|
|
|
(m_VideoDecoderCtx->codec->capabilities & AV_CODEC_CAP_HARDWARE) != 0;
|
2018-08-03 04:37:46 +00:00
|
|
|
}
|
2018-07-13 09:28:10 +00:00
|
|
|
|
2020-02-09 19:35:05 +00:00
|
|
|
bool FFmpegVideoDecoder::isAlwaysFullScreen()
|
|
|
|
{
|
2021-01-30 23:57:34 +00:00
|
|
|
return m_FrontendRenderer->getRendererAttributes() & RENDERER_ATTRIBUTE_FULLSCREEN_ONLY;
|
2020-02-09 19:35:05 +00:00
|
|
|
}
|
|
|
|
|
2018-08-25 19:38:04 +00:00
|
|
|
int FFmpegVideoDecoder::getDecoderCapabilities()
|
|
|
|
{
|
2020-01-26 22:13:42 +00:00
|
|
|
int capabilities = m_BackendRenderer->getDecoderCapabilities();
|
|
|
|
|
|
|
|
if (!isHardwareAccelerated()) {
|
|
|
|
// Slice up to 4 times for parallel CPU decoding, once slice per core
|
|
|
|
int slices = qMin(MAX_SLICES, SDL_GetCPUCount());
|
|
|
|
SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
|
|
|
|
"Encoder configured for %d slices per frame",
|
|
|
|
slices);
|
|
|
|
capabilities |= CAPABILITY_SLICES_PER_FRAME(slices);
|
|
|
|
}
|
|
|
|
|
|
|
|
return capabilities;
|
2018-08-25 19:38:04 +00:00
|
|
|
}
|
|
|
|
|
2019-12-14 23:25:56 +00:00
|
|
|
// Returns the colorspace the host should encode with. The frontend renderer
// is the component that actually displays frames, so it makes the choice.
int FFmpegVideoDecoder::getDecoderColorspace()
{
    return m_FrontendRenderer->getDecoderColorspace();
}
|
|
|
|
|
2020-02-23 08:43:43 +00:00
|
|
|
// Returns the maximum stream resolution this decoder supports.
// A size of 0x0 indicates no known maximum.
QSize FFmpegVideoDecoder::getDecoderMaxResolution()
{
    bool limitedTo1080p =
            (m_BackendRenderer->getRendererAttributes() & RENDERER_ATTRIBUTE_1080P_MAX) != 0;

    return limitedTo1080p ? QSize(1920, 1080) : QSize(0, 0);
}
|
|
|
|
|
2018-08-03 04:37:46 +00:00
|
|
|
// FFmpeg get_format() callback. We must override the default implementation,
// which would try to gracefully fall back to software decoding and break our
// rendering pipeline. The decoder instance is recovered from the context's
// opaque pointer (stashed during completeInitialization()).
enum AVPixelFormat FFmpegVideoDecoder::ffGetFormat(AVCodecContext* context,
                                                   const enum AVPixelFormat* pixFmts)
{
    FFmpegVideoDecoder* decoder = (FFmpegVideoDecoder*)context->opaque;

    // The format we insist on: the hwaccel surface format when hardware
    // decoding is configured, otherwise the preferred software pixel format.
    enum AVPixelFormat requiredFormat =
            decoder->m_HwDecodeCfg ? decoder->m_HwDecodeCfg->pix_fmt : context->pix_fmt;

    for (const enum AVPixelFormat* p = pixFmts; *p != AV_PIX_FMT_NONE; p++) {
        if (*p == requiredFormat) {
            return *p;
        }
    }

    // The preferred pixel format wasn't offered. For non-hwaccel decoders
    // only, accept any other format the frontend renderer can display.
    if (decoder->m_HwDecodeCfg == nullptr) {
        for (const enum AVPixelFormat* p = pixFmts; *p != AV_PIX_FMT_NONE; p++) {
            if (decoder->m_FrontendRenderer->isPixelFormatSupported(decoder->m_VideoFormat, *p)) {
                return *p;
            }
        }
    }

    return AV_PIX_FMT_NONE;
}
|
|
|
|
|
2019-02-13 02:42:53 +00:00
|
|
|
// Constructs the decoder in an idle state. When testOnly is true, this
// instance is only used to probe codec support and must never have real
// frames submitted (enforced by assertions in reset()).
FFmpegVideoDecoder::FFmpegVideoDecoder(bool testOnly)
    : m_Pkt(av_packet_alloc()),
      m_VideoDecoderCtx(nullptr),
      m_DecodeBuffer(1024 * 1024, 0),
      m_HwDecodeCfg(nullptr),
      m_BackendRenderer(nullptr),
      m_FrontendRenderer(nullptr),
      m_ConsecutiveFailedDecodes(0),
      m_Pacer(nullptr),
      m_FramesIn(0),
      m_FramesOut(0),
      m_LastFrameNumber(0),
      m_StreamFps(0),
      m_VideoFormat(0),
      m_NeedsSpsFixup(false),
      m_TestOnly(testOnly),
      m_CanRetryReceiveFrame(RRF_UNKNOWN)
{
    // Zero all statistics windows up front
    SDL_zero(m_ActiveWndVideoStats);
    SDL_zero(m_LastWndVideoStats);
    SDL_zero(m_GlobalVideoStats);

    // Use linear filtering when renderer scaling is required
    SDL_SetHint(SDL_HINT_RENDER_SCALE_QUALITY, "1");
}
|
|
|
|
|
|
|
|
// Tears down the decoder. reset() runs first so the codec context and
// renderers are destroyed in the correct dependency order.
FFmpegVideoDecoder::~FFmpegVideoDecoder()
{
    reset();

    // Set log level back to default.
    // NB: We don't do this in reset() because we want
    // to preserve the log level across reset() during
    // test initialization.
    av_log_set_level(AV_LOG_INFO);

    // Free the packet allocated in the constructor
    av_packet_free(&m_Pkt);
}
|
|
|
|
|
2019-04-12 05:27:20 +00:00
|
|
|
// Returns the backend (decoding) renderer. This may differ from the frontend
// renderer when an intermediate presentation layer (SDL or EGL) is in use.
IFFmpegRenderer* FFmpegVideoDecoder::getBackendRenderer()
{
    return m_BackendRenderer;
}
|
|
|
|
|
2018-08-03 04:37:46 +00:00
|
|
|
// Tears down the pacer, codec context, and renderers in dependency order.
// Called from the destructor and between initialization attempts when trying
// multiple renderer/decoder combinations.
void FFmpegVideoDecoder::reset()
{
    delete m_Pacer;
    m_Pacer = nullptr;

    // This must be called after deleting Pacer because it
    // may be holding AVFrames to free in its destructor.
    // However, it must be called before deleting the IFFmpegRenderer
    // since the codec context may be referencing objects that we
    // need to delete in the renderer destructor.
    avcodec_free_context(&m_VideoDecoderCtx);

    // Detach from the overlay manager before the frontend renderer is freed
    if (!m_TestOnly) {
        Session::get()->getOverlayManager().setOverlayRenderer(nullptr);
    }

    // If we have a separate frontend renderer, free that first
    if (m_FrontendRenderer != m_BackendRenderer) {
        delete m_FrontendRenderer;
    }

    delete m_BackendRenderer;

    m_FrontendRenderer = m_BackendRenderer = nullptr;

    if (!m_TestOnly) {
        logVideoStats(m_GlobalVideoStats, "Global video stats");
    }
    else {
        // Test-only decoders can't have any frames submitted
        SDL_assert(m_GlobalVideoStats.totalFrames == 0);
    }
}
|
|
|
|
|
2021-03-23 03:51:29 +00:00
|
|
|
// Creates the frontend (presentation) renderer for this decoding session.
// When eglOnly is set, only a zero-copy EGL frontend is acceptable; otherwise
// we prefer direct rendering by the backend and fall back to SDL read-back.
bool FFmpegVideoDecoder::createFrontendRenderer(PDECODER_PARAMETERS params, bool eglOnly)
{
    if (eglOnly) {
#ifdef HAVE_EGL
        if (m_BackendRenderer->canExportEGL()) {
            m_FrontendRenderer = new EGLRenderer(m_BackendRenderer);
            if (!m_FrontendRenderer->initialize(params)) {
                delete m_FrontendRenderer;
                m_FrontendRenderer = nullptr;
            }
            else {
                return true;
            }
        }
#endif
        // If we made it here, we failed to create the EGLRenderer
        return false;
    }

    if (m_BackendRenderer->isDirectRenderingSupported()) {
        // The backend renderer can render to the display itself
        m_FrontendRenderer = m_BackendRenderer;
        return true;
    }

    // The backend renderer cannot directly render to the display, so
    // we will create an SDL renderer to draw the frames.
    m_FrontendRenderer = new SdlRenderer();
    return m_FrontendRenderer->initialize(params);
}
|
|
|
|
|
2021-05-25 04:26:38 +00:00
|
|
|
// Finishes decoder setup after the backend renderer has been initialized:
// creates the frontend renderer and Pacer, allocates and opens the codec
// context, and (when testFrame is set) runs a trial decode of a canned
// bitstream to verify the codec actually works before streaming begins.
// Returns false on any failure; the caller is responsible for calling
// reset() to clean up partially-initialized state.
bool FFmpegVideoDecoder::completeInitialization(const AVCodec* decoder, PDECODER_PARAMETERS params, bool testFrame, bool eglOnly)
{
    // In test-only mode, we should only see test frames
    SDL_assert(!m_TestOnly || testFrame);

    // Create the frontend renderer based on the capabilities of the backend renderer
    if (!createFrontendRenderer(params, eglOnly)) {
        return false;
    }

    m_StreamFps = params->frameRate;
    m_VideoFormat = params->videoFormat;

    // Don't bother initializing Pacer if we're not actually going to render
    if (!testFrame) {
        m_Pacer = new Pacer(m_FrontendRenderer, &m_ActiveWndVideoStats);
        if (!m_Pacer->initialize(params->window, params->frameRate, params->enableFramePacing)) {
            return false;
        }
    }

    m_VideoDecoderCtx = avcodec_alloc_context3(decoder);
    if (!m_VideoDecoderCtx) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "Unable to allocate video decoder context");
        return false;
    }

    // Always request low delay decoding
    m_VideoDecoderCtx->flags |= AV_CODEC_FLAG_LOW_DELAY;

    // Allow display of corrupt frames and frames missing references
    m_VideoDecoderCtx->flags |= AV_CODEC_FLAG_OUTPUT_CORRUPT;
    m_VideoDecoderCtx->flags2 |= AV_CODEC_FLAG2_SHOW_ALL;

    // Report decoding errors to allow us to request a key frame
    //
    // With HEVC streams, FFmpeg can drop a frame (hwaccel->start_frame() fails)
    // without telling us. Since we have an infinite GOP length, this causes artifacts
    // on screen that persist for a long time. It's easy to cause this condition
    // by using NVDEC and delaying 100 ms randomly in the render path so the decoder
    // runs out of output buffers.
    m_VideoDecoderCtx->err_recognition = AV_EF_EXPLODE;

    // Enable slice multi-threading for software decoding
    if (!isHardwareAccelerated()) {
        m_VideoDecoderCtx->thread_type = FF_THREAD_SLICE;
        m_VideoDecoderCtx->thread_count = qMin(MAX_SLICES, SDL_GetCPUCount());
    }
    else {
        // No threading for HW decode
        m_VideoDecoderCtx->thread_count = 1;
    }

    // Setup decoding parameters
    m_VideoDecoderCtx->width = params->width;
    m_VideoDecoderCtx->height = params->height;
    m_VideoDecoderCtx->pix_fmt = m_FrontendRenderer->getPreferredPixelFormat(params->videoFormat);
    m_VideoDecoderCtx->get_format = ffGetFormat;

    AVDictionary* options = nullptr;

    // Allow the backend renderer to attach data to this decoder
    if (!m_BackendRenderer->prepareDecoderContext(m_VideoDecoderCtx, &options)) {
        return false;
    }

    // Nobody must override our ffGetFormat
    SDL_assert(m_VideoDecoderCtx->get_format == ffGetFormat);

    // Stash a pointer to this object in the context, so ffGetFormat()
    // can find us again from the plain C callback.
    SDL_assert(m_VideoDecoderCtx->opaque == nullptr);
    m_VideoDecoderCtx->opaque = this;

    int err = avcodec_open2(m_VideoDecoderCtx, decoder, &options);
    av_dict_free(&options);
    if (err < 0) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "Unable to open decoder for format: %x",
                     params->videoFormat);
        return false;
    }

    // FFMpeg doesn't completely initialize the codec until the codec
    // config data comes in. This would be too late for us to change
    // our minds on the selected video codec, so we'll do a trial run
    // now to see if things will actually work when the video stream
    // comes in.
    if (testFrame) {
        // Point the packet at the canned test bitstream for this format
        switch (params->videoFormat) {
        case VIDEO_FORMAT_H264:
            m_Pkt->data = (uint8_t*)k_H264TestFrame;
            m_Pkt->size = sizeof(k_H264TestFrame);
            break;
        case VIDEO_FORMAT_H265:
            m_Pkt->data = (uint8_t*)k_HEVCMainTestFrame;
            m_Pkt->size = sizeof(k_HEVCMainTestFrame);
            break;
        case VIDEO_FORMAT_H265_MAIN10:
            m_Pkt->data = (uint8_t*)k_HEVCMain10TestFrame;
            m_Pkt->size = sizeof(k_HEVCMain10TestFrame);
            break;
        default:
            SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                         "No test frame for format: %x",
                         params->videoFormat);
            return false;
        }

        AVFrame* frame = av_frame_alloc();
        if (!frame) {
            SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                         "Failed to allocate frame");
            return false;
        }

        // Some decoders won't output on the first frame, so we'll submit
        // a few test frames if we get an EAGAIN error.
        for (int retries = 0; retries < 5; retries++) {
            // Most FFmpeg decoders process input using a "push" model.
            // We'll see those fail here if the format is not supported.
            err = avcodec_send_packet(m_VideoDecoderCtx, m_Pkt);
            if (err < 0) {
                av_frame_free(&frame);
                char errorstring[512];
                av_strerror(err, errorstring, sizeof(errorstring));
                SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION,
                            "Test decode failed (avcodec_send_packet): %s", errorstring);
                return false;
            }

            // A few FFmpeg decoders (h264_mmal) process here using a "pull" model.
            // Those decoders will fail here if the format is not supported.
            err = avcodec_receive_frame(m_VideoDecoderCtx, frame);
            if (err == AVERROR(EAGAIN)) {
                // Wait a little while to let the hardware work
                SDL_Delay(100);
            }
            else {
                // Done!
                break;
            }
        }

        if (err < 0) {
            char errorstring[512];
            av_strerror(err, errorstring, sizeof(errorstring));
            SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION,
                        "Test decode failed (avcodec_receive_frame): %s", errorstring);
            av_frame_free(&frame);
            return false;
        }

        // Allow the renderer to do any validation it wants on this frame
        if (!m_FrontendRenderer->testRenderFrame(frame)) {
            SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION,
                        "Test decode failed (testRenderFrame)");
            av_frame_free(&frame);
            return false;
        }

        av_frame_free(&frame);
    }
    else {
        // Enable the SPS fixup for H.264 unless the backend supports
        // AVC reference frame invalidation
        if ((params->videoFormat & VIDEO_FORMAT_MASK_H264) &&
                !(m_BackendRenderer->getDecoderCapabilities() & CAPABILITY_REFERENCE_FRAME_INVALIDATION_AVC)) {
            SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
                        "Using H.264 SPS fixup");
            m_NeedsSpsFixup = true;
        }
        else {
            m_NeedsSpsFixup = false;
        }

        // Tell overlay manager to use this frontend renderer
        Session::get()->getOverlayManager().setOverlayRenderer(m_FrontendRenderer);
    }

    return true;
}
|
|
|
|
|
2018-09-25 07:47:59 +00:00
|
|
|
// Accumulates the statistics of one measurement window (src) into another
// (dst), refreshing the RTT snapshot and recomputing the per-second rates
// over dst's full measurement period.
void FFmpegVideoDecoder::addVideoStats(VIDEO_STATS& src, VIDEO_STATS& dst)
{
    dst.receivedFrames += src.receivedFrames;
    dst.decodedFrames += src.decodedFrames;
    dst.renderedFrames += src.renderedFrames;
    dst.totalFrames += src.totalFrames;
    dst.networkDroppedFrames += src.networkDroppedFrames;
    dst.pacerDroppedFrames += src.pacerDroppedFrames;
    dst.totalReassemblyTime += src.totalReassemblyTime;
    dst.totalDecodeTime += src.totalDecodeTime;
    dst.totalPacerTime += src.totalPacerTime;
    dst.totalRenderTime += src.totalRenderTime;

    if (LiGetEstimatedRttInfo(&dst.lastRtt, &dst.lastRttVariance)) {
        // Our logic to determine if RTT is valid depends on us never
        // getting an RTT of 0. ENet currently ensures RTTs are >= 1.
        SDL_assert(dst.lastRtt > 0);
    }
    else {
        dst.lastRtt = 0;
        dst.lastRttVariance = 0;
    }

    Uint32 now = SDL_GetTicks();

    // Initialize the measurement start point if this is the first video stat window
    if (!dst.measurementStartTimestamp) {
        dst.measurementStartTimestamp = src.measurementStartTimestamp;
    }

    // The following code assumes the global measure was already started first
    SDL_assert(dst.measurementStartTimestamp <= src.measurementStartTimestamp);

    // Recompute all rates over dst's full measurement period
    float elapsedSeconds = (float)(now - dst.measurementStartTimestamp) / 1000;
    dst.totalFps = (float)dst.totalFrames / elapsedSeconds;
    dst.receivedFps = (float)dst.receivedFrames / elapsedSeconds;
    dst.decodedFps = (float)dst.decodedFrames / elapsedSeconds;
    dst.renderedFps = (float)dst.renderedFrames / elapsedSeconds;
}
|
|
|
|
|
2019-01-20 07:05:56 +00:00
|
|
|
// Formats a human-readable summary of the given statistics window into
// 'output'. The caller must supply a buffer large enough for the full text
// (callers in this file use 512 bytes). Sections are emitted only when the
// corresponding counters are non-zero, so an idle window yields an empty
// string.
void FFmpegVideoDecoder::stringifyVideoStats(VIDEO_STATS& stats, char* output)
{
    int offset = 0;
    const char* codecString;

    // Start with an empty string
    output[offset] = 0;

    // Map the active stream format to a display name
    switch (m_VideoFormat)
    {
    case VIDEO_FORMAT_H264:
        codecString = "H.264";
        break;

    case VIDEO_FORMAT_H265:
        codecString = "HEVC";
        break;

    case VIDEO_FORMAT_H265_MAIN10:
        codecString = "HEVC Main 10";
        break;

    default:
        SDL_assert(false);
        codecString = "UNKNOWN";
        break;
    }

    if (stats.receivedFps > 0) {
        // The codec context may already be torn down when stringifying the
        // final global stats, so guard the resolution line on it
        if (m_VideoDecoderCtx != nullptr) {
            offset += sprintf(&output[offset],
                              "Video stream: %dx%d %.2f FPS (Codec: %s)\n",
                              m_VideoDecoderCtx->width,
                              m_VideoDecoderCtx->height,
                              stats.totalFps,
                              codecString);
        }

        offset += sprintf(&output[offset],
                          "Incoming frame rate from network: %.2f FPS\n"
                          "Decoding frame rate: %.2f FPS\n"
                          "Rendering frame rate: %.2f FPS\n",
                          stats.receivedFps,
                          stats.decodedFps,
                          stats.renderedFps);
    }

    if (stats.renderedFrames != 0) {
        char rttString[32];

        // lastRtt == 0 means no RTT estimate was available (see addVideoStats)
        if (stats.lastRtt != 0) {
            sprintf(rttString, "%u ms (variance: %u ms)", stats.lastRtt, stats.lastRttVariance);
        }
        else {
            sprintf(rttString, "N/A");
        }

        offset += sprintf(&output[offset],
                          "Frames dropped by your network connection: %.2f%%\n"
                          "Frames dropped due to network jitter: %.2f%%\n"
                          "Average network latency: %s\n"
                          "Average decoding time: %.2f ms\n"
                          "Average frame queue delay: %.2f ms\n"
                          "Average rendering time (including monitor V-sync latency): %.2f ms\n",
                          (float)stats.networkDroppedFrames / stats.totalFrames * 100,
                          (float)stats.pacerDroppedFrames / stats.decodedFrames * 100,
                          rttString,
                          (float)stats.totalDecodeTime / stats.decodedFrames,
                          (float)stats.totalPacerTime / stats.renderedFrames,
                          (float)stats.totalRenderTime / stats.renderedFrames);
    }
}
|
|
|
|
|
|
|
|
// Logs a titled, human-readable dump of the given statistics window.
// Does nothing if the window contains no rendered frames.
void FFmpegVideoDecoder::logVideoStats(VIDEO_STATS& stats, const char* title)
{
    if (stats.renderedFps <= 0 && stats.renderedFrames == 0) {
        // Nothing was rendered in this window; skip the log spam
        return;
    }

    char videoStatsStr[512];
    stringifyVideoStats(stats, videoStatsStr);

    SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
                "%s", title);
    SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
                "----------------------------------------------------------\n%s",
                videoStatsStr);
}
|
|
|
|
|
2019-06-27 04:54:29 +00:00
|
|
|
// Creates a hwaccel-backed renderer for the given hardware configuration,
// or nullptr if none applies. Callers invoke this in up to two passes:
// pass 0 tries the preferred hwaccel implementations for this platform,
// pass 1 tries the fallbacks. Only device-context hwaccels are supported.
IFFmpegRenderer* FFmpegVideoDecoder::createHwAccelRenderer(const AVCodecHWConfig* hwDecodeCfg, int pass)
{
    // We only support hwaccels that use AVHWDeviceContext
    if (!(hwDecodeCfg->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX)) {
        return nullptr;
    }

    // First pass using our top-tier hwaccel implementations
    if (pass == 0) {
        switch (hwDecodeCfg->device_type) {
#ifdef Q_OS_WIN32
        case AV_HWDEVICE_TYPE_DXVA2:
            return new DXVA2Renderer();
#endif
#ifdef Q_OS_DARWIN
        case AV_HWDEVICE_TYPE_VIDEOTOOLBOX:
            return VTRendererFactory::createRenderer();
#endif
#ifdef HAVE_LIBVA
        case AV_HWDEVICE_TYPE_VAAPI:
            return new VAAPIRenderer();
#endif
#ifdef HAVE_LIBVDPAU
        case AV_HWDEVICE_TYPE_VDPAU:
            return new VDPAURenderer();
#endif
#ifdef HAVE_DRM
        case AV_HWDEVICE_TYPE_DRM:
            return new DrmRenderer();
#endif
        default:
            return nullptr;
        }
    }
    // Second pass for our second-tier hwaccel implementations
    else if (pass == 1) {
        switch (hwDecodeCfg->device_type) {
#ifdef HAVE_CUDA
        case AV_HWDEVICE_TYPE_CUDA:
            // CUDA should only be used to cover the NVIDIA+Wayland case
            return new CUDARenderer();
#endif
        default:
            return nullptr;
        }
    }
    else {
        // Callers only use passes 0 and 1
        SDL_assert(false);
        return nullptr;
    }
}
|
|
|
|
|
2021-05-25 04:26:38 +00:00
|
|
|
// Attempts to bring up a complete decoding pipeline using the renderer
// produced by createRendererFunc. Tries the zero-copy EGL frontend first
// (when built with EGL support), then direct/SDL rendering. If the backend
// requires a test frame, a successful test is followed by a full
// re-initialization for real use. Returns true on success; on failure,
// all partially-constructed state has been torn down via reset().
bool FFmpegVideoDecoder::tryInitializeRenderer(const AVCodec* decoder,
                                               PDECODER_PARAMETERS params,
                                               const AVCodecHWConfig* hwConfig,
                                               std::function<IFFmpegRenderer*()> createRendererFunc)
{
    m_HwDecodeCfg = hwConfig;

    // i == 0 - Indirect via EGL frontend with zero-copy DMA-BUF passing
    // i == 1 - Direct rendering or indirect via SDL read-back
#ifdef HAVE_EGL
    for (int i = 0; i < 2; i++) {
#else
    for (int i = 1; i < 2; i++) {
#endif
        SDL_assert(m_BackendRenderer == nullptr);
        if ((m_BackendRenderer = createRendererFunc()) != nullptr &&
                m_BackendRenderer->initialize(params) &&
                completeInitialization(decoder, params, m_TestOnly || m_BackendRenderer->needsTestFrame(), i == 0 /* EGL */)) {
            if (m_TestOnly) {
                // This decoder is only for testing capabilities, so don't bother
                // creating a usable renderer
                return true;
            }

            if (m_BackendRenderer->needsTestFrame()) {
                // The test worked, so now let's initialize it for real
                reset();
                if ((m_BackendRenderer = createRendererFunc()) != nullptr &&
                        m_BackendRenderer->initialize(params) &&
                        completeInitialization(decoder, params, false, i == 0 /* EGL */)) {
                    return true;
                }
                else {
                    SDL_LogCritical(SDL_LOG_CATEGORY_APPLICATION,
                                    "Decoder failed to initialize after successful test");
                    reset();
                }
            }
            else {
                // No test required. Good to go now.
                return true;
            }
        }
        else {
            // Failed to initialize, so keep looking
            reset();
        }
    }

    // reset() must be called before we reach this point!
    SDL_assert(m_BackendRenderer == nullptr);
    return false;
}
|
|
|
|
|
2021-02-03 01:05:27 +00:00
|
|
|
// Helper macros used by tryInitializeRendererForDecoderByName() while walking
// a decoder's pix_fmts list (loop index 'i', locals 'decoder' and 'params' are
// expected in the expansion scope). Each instantiates a throwaway renderer of
// RENDERER_TYPE to query its pixel format support, and on a match attempts a
// full pipeline initialization, returning true from the enclosing function on
// success. Comments cannot appear inside the macro bodies because of the
// line continuations.

// Matches when RENDERER_TYPE *prefers* the current pixel format.
#define TRY_PREFERRED_PIXEL_FORMAT(RENDERER_TYPE) \
    { \
        RENDERER_TYPE renderer; \
        if (renderer.getPreferredPixelFormat(params->videoFormat) == decoder->pix_fmts[i]) { \
            if (tryInitializeRenderer(decoder, params, nullptr, \
                                      []() -> IFFmpegRenderer* { return new RENDERER_TYPE(); })) { \
                SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, \
                            "Chose " #RENDERER_TYPE " for codec %s due to preferred pixel format: 0x%x", \
                            decoder->name, decoder->pix_fmts[i]); \
                return true; \
            } \
        } \
    }

// Matches when RENDERER_TYPE merely *supports* the current pixel format.
#define TRY_SUPPORTED_PIXEL_FORMAT(RENDERER_TYPE) \
    { \
        RENDERER_TYPE renderer; \
        if (renderer.isPixelFormatSupported(params->videoFormat, decoder->pix_fmts[i])) { \
            if (tryInitializeRenderer(decoder, params, nullptr, \
                                      []() -> IFFmpegRenderer* { return new RENDERER_TYPE(); })) { \
                SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, \
                            "Chose " #RENDERER_TYPE " for codec %s due to compatible pixel format: 0x%x", \
                            decoder->name, decoder->pix_fmts[i]); \
                return true; \
            } \
        } \
    }
|
|
|
|
|
|
|
|
// Attempts to initialize a pipeline around a specific named FFmpeg decoder
// (typically supplied via an environment-variable hint). Tries the decoder's
// hwaccel configurations first, then matches its software output pixel
// formats against our renderers — preferred formats first, then merely
// supported ones. Returns true if a pipeline was brought up.
bool FFmpegVideoDecoder::tryInitializeRendererForDecoderByName(const char *decoderName,
                                                               PDECODER_PARAMETERS params)
{
    const AVCodec* decoder = avcodec_find_decoder_by_name(decoderName);
    if (decoder == nullptr) {
        return false;
    }

    // This might be a hwaccel decoder, so try any hw configs first
    for (int i = 0;; i++) {
        const AVCodecHWConfig *config = avcodec_get_hw_config(decoder, i);
        if (!config) {
            // No remaining hwaccel options
            break;
        }

        // Initialize the hardware codec and submit a test frame if the renderer needs it
        if (tryInitializeRenderer(decoder, params, config,
                                  [config]() -> IFFmpegRenderer* { return createHwAccelRenderer(config, 0); })) {
            return true;
        }
    }

    if (decoder->pix_fmts == NULL) {
        // Supported output pixel formats are unknown. We'll just try SDL and hope it can cope.
        return tryInitializeRenderer(decoder, params, nullptr,
                                     []() -> IFFmpegRenderer* { return new SdlRenderer(); });
    }

    // Check if any of our decoders prefer any of the pixel formats first
    for (int i = 0; decoder->pix_fmts[i] != AV_PIX_FMT_NONE; i++) {
#ifdef HAVE_DRM
        TRY_PREFERRED_PIXEL_FORMAT(DrmRenderer);
#endif
#ifdef HAVE_MMAL
        TRY_PREFERRED_PIXEL_FORMAT(MmalRenderer);
#endif
        // HACK: Avoid using YUV420P on h264_mmal. It can cause a deadlock inside the MMAL libraries.
        // Even if it didn't completely deadlock us, the performance would likely be atrocious.
        if (strcmp(decoderName, "h264_mmal") != 0) {
            TRY_PREFERRED_PIXEL_FORMAT(SdlRenderer);
        }
    }

    // Nothing prefers any of them. Let's see if anyone will tolerate one.
    for (int i = 0; decoder->pix_fmts[i] != AV_PIX_FMT_NONE; i++) {
#ifdef HAVE_DRM
        TRY_SUPPORTED_PIXEL_FORMAT(DrmRenderer);
#endif
#ifdef HAVE_MMAL
        TRY_SUPPORTED_PIXEL_FORMAT(MmalRenderer);
#endif
        // HACK: See comment above
        if (strcmp(decoderName, "h264_mmal") != 0) {
            TRY_SUPPORTED_PIXEL_FORMAT(SdlRenderer);
        }
    }

    // If we made it here, we couldn't find anything
    return false;
}
|
|
|
|
|
2019-04-12 05:27:20 +00:00
|
|
|
// Initializes the FFmpeg video decoder pipeline for the given stream parameters.
//
// Selection order:
//  1. User-specified decoders from H264_DECODER_HINT / HEVC_DECODER_HINT env vars
//  2. First-tier hwaccel decoders (pass 0)
//  3. Special non-hwaccel hardware decoders (MMAL, RKMPP, NVMPI, V4L2M2M)
//  4. Second-tier hwaccel decoders (pass 1), e.g. CUDA
//  5. Software decoding via SDL, unless hardware decoding is forced
//
// Returns true if any renderer/decoder pair was successfully initialized.
bool FFmpegVideoDecoder::initialize(PDECODER_PARAMETERS params)
{
    // Increase log level until the first frame is decoded
    av_log_set_level(AV_LOG_DEBUG);

    // First try decoders that the user has manually specified via environment variables.
    // These must output surfaces in one of the formats that one of our renderers supports,
    // which is currently:
    // - AV_PIX_FMT_DRM_PRIME
    // - AV_PIX_FMT_MMAL
    // - AV_PIX_FMT_YUV420P
    // - AV_PIX_FMT_NV12
    // - AV_PIX_FMT_NV21
    //
    // Helper: tries the decoder named by 'envVarName' when the stream format matches
    // 'videoFormatMask'. Logs with the exact messages in 'successFormat'/'failureFormat'
    // (each takes the decoder name as its single %s argument).
    auto tryDecoderHint = [this, params](const char* envVarName, int videoFormatMask,
                                         const char* successFormat, const char* failureFormat) {
        QString decoderHint = qgetenv(envVarName);
        if (!decoderHint.isEmpty() && (params->videoFormat & videoFormatMask)) {
            QByteArray decoderString = decoderHint.toLocal8Bit();
            if (tryInitializeRendererForDecoderByName(decoderString.constData(), params)) {
                SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION,
                            successFormat,
                            decoderString.constData());
                return true;
            }
            else {
                SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                             failureFormat,
                             decoderString.constData());
            }
        }
        return false;
    };

    if (tryDecoderHint("H264_DECODER_HINT", VIDEO_FORMAT_MASK_H264,
                       "Using custom H.264 decoder (H264_DECODER_HINT): %s",
                       "Custom H.264 decoder (H264_DECODER_HINT) failed to load: %s")) {
        return true;
    }

    if (tryDecoderHint("HEVC_DECODER_HINT", VIDEO_FORMAT_MASK_H265,
                       "Using custom HEVC decoder (HEVC_DECODER_HINT): %s",
                       "Custom HEVC decoder (HEVC_DECODER_HINT) failed to load: %s")) {
        return true;
    }

    const AVCodec* decoder;

    if (params->videoFormat & VIDEO_FORMAT_MASK_H264) {
        decoder = avcodec_find_decoder(AV_CODEC_ID_H264);
    }
    else if (params->videoFormat & VIDEO_FORMAT_MASK_H265) {
        decoder = avcodec_find_decoder(AV_CODEC_ID_HEVC);
    }
    else {
        Q_ASSERT(false);
        decoder = nullptr;
    }

    if (!decoder) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "Unable to find decoder for format: %x",
                     params->videoFormat);
        return false;
    }

    // Look for a hardware decoder first unless software-only
    if (params->vds != StreamingPreferences::VDS_FORCE_SOFTWARE) {
        // Helper: enumerates all hwaccel configs for 'decoder' and tries to
        // initialize a renderer for each one at the given selection pass.
        auto tryHwAccelPass = [this, decoder, params](int pass) {
            for (int i = 0;; i++) {
                const AVCodecHWConfig *config = avcodec_get_hw_config(decoder, i);
                if (!config) {
                    // No remaining hwaccel options
                    break;
                }

                // Initialize the hardware codec and submit a test frame if the renderer needs it
                if (tryInitializeRenderer(decoder, params, config,
                                          [config, pass]() -> IFFmpegRenderer* { return createHwAccelRenderer(config, pass); })) {
                    return true;
                }
            }
            return false;
        };

        // Look for the first matching hwaccel hardware decoder (pass 0)
        if (tryHwAccelPass(0)) {
            return true;
        }

        // Continue with special non-hwaccel hardware decoders
        QList<const char *> knownHwCodecs;
        if (params->videoFormat & VIDEO_FORMAT_MASK_H264) {
            knownHwCodecs = {
#ifdef HAVE_MMAL
                "h264_mmal",
#endif
                "h264_rkmpp",
                "h264_nvmpi",
#ifndef HAVE_MMAL
                // Only enable V4L2M2M by default on non-MMAL (RPi) builds. The performance
                // of the V4L2M2M wrapper around MMAL is not enough for 1080p 60 FPS, so we
                // would rather show the missing hardware acceleration warning when the user
                // is in Full KMS mode rather than try to use a poorly performing hwaccel.
                // See discussion on https://github.com/jc-kynesim/rpi-ffmpeg/pull/25
                "h264_v4l2m2m",
#endif
            };
        }
        else {
            knownHwCodecs = { "hevc_rkmpp", "hevc_nvmpi", "hevc_v4l2m2m" };
        }

        for (const char* codec : knownHwCodecs) {
            if (tryInitializeRendererForDecoderByName(codec, params)) {
                return true;
            }
        }

        // Look for the first matching hwaccel hardware decoder (pass 1)
        // This picks up "second-tier" hwaccels like CUDA.
        if (tryHwAccelPass(1)) {
            return true;
        }
    }

    // Fallback to software if no matching hardware decoder was found
    // and if software fallback is allowed
    if (params->vds != StreamingPreferences::VDS_FORCE_HARDWARE) {
        if (tryInitializeRenderer(decoder, params, nullptr,
                                  []() -> IFFmpegRenderer* { return new SdlRenderer(); })) {
            return true;
        }
    }

    // No decoder worked
    return false;
}
|
|
|
|
|
2018-10-13 00:59:53 +00:00
|
|
|
// Copies one decode unit buffer entry into m_DecodeBuffer at 'offset' and
// advances 'offset' by the number of bytes written.
//
// When m_NeedsSpsFixup is set and the entry is an SPS NALU, the SPS is
// rewritten in-flight (num_ref_frames and max_dec_frame_buffering forced
// to 1) before being copied; the caller reserves MAX_SPS_EXTRA_SIZE extra
// bytes to accommodate any growth from the rewrite.
void FFmpegVideoDecoder::writeBuffer(PLENTRY entry, int& offset)
{
    if (m_NeedsSpsFixup && entry->bufferType == BUFFER_TYPE_SPS) {
        // 4-byte Annex B start sequence expected at the front of the SPS
        const char naluHeader[] = {0x00, 0x00, 0x00, 0x01};
        h264_stream_t* stream = h264_new();
        int nalStart, nalEnd;

        // Read the old NALU
        find_nal_unit((uint8_t*)entry->data, entry->length, &nalStart, &nalEnd);
        read_nal_unit(stream,
                      (unsigned char *)&entry->data[nalStart],
                      nalEnd - nalStart);

        // The SPS entry is expected to be exactly one NALU with a 4-byte prefix
        SDL_assert(nalStart == sizeof(naluHeader));
        SDL_assert(nalEnd == entry->length);

        // Fixup the SPS to what OS X needs to use hardware acceleration
        stream->sps->num_ref_frames = 1;
        stream->sps->vui.max_dec_frame_buffering = 1;

        int initialOffset = offset;

        // Copy the modified NALU data. This assumes a 3 byte prefix and
        // begins writing from the 2nd byte, so we must write the data
        // first, then go back and write the Annex B prefix.
        // NOTE(review): this relies on write_nal_unit()'s prefix quirk —
        // the memcpy below then overwrites bytes [initialOffset,
        // initialOffset + 4) with the 4-byte Annex B header. Confirm
        // against the h264bitstream write_nal_unit() contract.
        offset += write_nal_unit(stream, (uint8_t*)&m_DecodeBuffer.data()[initialOffset + 3],
                                 MAX_SPS_EXTRA_SIZE + entry->length - sizeof(naluHeader));

        // Copy the NALU prefix over from the original SPS
        memcpy(&m_DecodeBuffer.data()[initialOffset], naluHeader, sizeof(naluHeader));
        offset += sizeof(naluHeader);

        h264_free(stream);
    }
    else {
        // Write the buffer as-is
        memcpy(&m_DecodeBuffer.data()[offset],
               entry->data,
               entry->length);
        offset += entry->length;
    }
}
|
|
|
|
|
2018-07-18 03:00:16 +00:00
|
|
|
// Submits one decode unit (a complete frame's worth of NAL data) to the
// FFmpeg decoder and forwards any decoded frames to Pacer for rendering.
//
// du - the decode unit from moonlight-common-c's frame reassembly
//
// Returns DR_OK on success or DR_NEED_IDR to request an IDR frame from the
// host when the packet could not be submitted to the decoder.
int FFmpegVideoDecoder::submitDecodeUnit(PDECODE_UNIT du)
{
    PLENTRY entry = du->bufferList;
    int err;
    bool submittedFrame = false;

    // Test-only decoder instances must never receive real decode units
    SDL_assert(!m_TestOnly);

    if (!m_LastFrameNumber) {
        // First frame: start the stats measurement window
        m_ActiveWndVideoStats.measurementStartTimestamp = SDL_GetTicks();
        m_LastFrameNumber = du->frameNumber;
    }
    else {
        // Any frame number greater than m_LastFrameNumber + 1 represents a dropped frame
        m_ActiveWndVideoStats.networkDroppedFrames += du->frameNumber - (m_LastFrameNumber + 1);
        m_ActiveWndVideoStats.totalFrames += du->frameNumber - (m_LastFrameNumber + 1);
        m_LastFrameNumber = du->frameNumber;
    }

    // Flip stats windows roughly every second
    if (SDL_TICKS_PASSED(SDL_GetTicks(), m_ActiveWndVideoStats.measurementStartTimestamp + 1000)) {
        // Update overlay stats if it's enabled
        if (Session::get()->getOverlayManager().isOverlayEnabled(Overlay::OverlayDebug)) {
            // The overlay shows the combination of the last full window and
            // the currently accumulating one
            VIDEO_STATS lastTwoWndStats = {};
            addVideoStats(m_LastWndVideoStats, lastTwoWndStats);
            addVideoStats(m_ActiveWndVideoStats, lastTwoWndStats);

            stringifyVideoStats(lastTwoWndStats, Session::get()->getOverlayManager().getOverlayText(Overlay::OverlayDebug));
            Session::get()->getOverlayManager().setOverlayTextUpdated(Overlay::OverlayDebug);
        }

        // Accumulate these values into the global stats
        addVideoStats(m_ActiveWndVideoStats, m_GlobalVideoStats);

        // Move this window into the last window slot and clear it for next window
        SDL_memcpy(&m_LastWndVideoStats, &m_ActiveWndVideoStats, sizeof(m_ActiveWndVideoStats));
        SDL_zero(m_ActiveWndVideoStats);
        m_ActiveWndVideoStats.measurementStartTimestamp = SDL_GetTicks();
    }

    m_ActiveWndVideoStats.receivedFrames++;
    m_ActiveWndVideoStats.totalFrames++;

    int requiredBufferSize = du->fullLength;
    if (du->frameType == FRAME_TYPE_IDR) {
        // Add some extra space in case we need to do an SPS fixup
        requiredBufferSize += MAX_SPS_EXTRA_SIZE;
    }

    // Ensure the decoder buffer is large enough
    // (AV_INPUT_BUFFER_PADDING_SIZE of zero padding is required by FFmpeg)
    m_DecodeBuffer.reserve(requiredBufferSize + AV_INPUT_BUFFER_PADDING_SIZE);

    // Flatten the buffer chain into one contiguous packet buffer
    int offset = 0;
    while (entry != nullptr) {
        writeBuffer(entry, offset);
        entry = entry->next;
    }

    m_Pkt->data = reinterpret_cast<uint8_t*>(m_DecodeBuffer.data());
    m_Pkt->size = offset;

    if (du->frameType == FRAME_TYPE_IDR) {
        m_Pkt->flags = AV_PKT_FLAG_KEY;
    }
    else {
        m_Pkt->flags = 0;
    }

    // Time between receiving the first packet and completing reassembly
    m_ActiveWndVideoStats.totalReassemblyTime += du->enqueueTimeMs - du->receiveTimeMs;

    err = avcodec_send_packet(m_VideoDecoderCtx, m_Pkt);
    if (err < 0) {
        char errorstring[512];
        av_strerror(err, errorstring, sizeof(errorstring));
        SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION,
                    "avcodec_send_packet() failed: %s", errorstring);

        // If we've failed a bunch of decodes in a row, the decoder/renderer is
        // clearly unhealthy, so let's generate a synthetic reset event to trigger
        // the event loop to destroy and recreate the decoder.
        if (++m_ConsecutiveFailedDecodes == FAILED_DECODES_RESET_THRESHOLD) {
            SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                         "Resetting decoder due to consistent failure");

            SDL_Event event;
            event.type = SDL_RENDER_DEVICE_RESET;
            SDL_PushEvent(&event);
        }

        return DR_NEED_IDR;
    }

    m_FramesIn++;

    // We can receive 0 or more frames after submission of a packet, so we must
    // try to read until we get EAGAIN to ensure the queue is drained. Some decoders
    // run asynchronously and may return several frames at once after warming up.
    //
    // Some decoders support calling avcodec_receive_frame() without queuing a packet.
    // This allows us to drain excess frames and reduce latency. We will try to learn
    // if a decoder is capable of this by trying it and seeing if it works.
    int receiveRetries = 0;
    do {
        AVFrame* frame = av_frame_alloc();
        if (!frame) {
            // Failed to allocate a frame but we did submit,
            // so we can return DR_OK
            SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION,
                        "Failed to allocate frame");
            return DR_OK;
        }

        err = avcodec_receive_frame(m_VideoDecoderCtx, frame);
        if (err == 0) {
            m_FramesOut++;

            // Reset failed decodes count if we reached this far
            m_ConsecutiveFailedDecodes = 0;

            // Restore default log level after a successful decode
            av_log_set_level(AV_LOG_INFO);

            // Store the presentation time
            // FIXME: This is wrong when reading a batch of frames
            frame->pts = du->presentationTimeMs;

            // Capture a frame timestamp to measuring pacing delay
            frame->pkt_dts = SDL_GetTicks();

            // Count time in avcodec_send_packet() and avcodec_receive_frame()
            // as time spent decoding. Also count time spent in the decode unit
            // queue because that's directly caused by decoder latency.
            m_ActiveWndVideoStats.totalDecodeTime += LiGetMillis() - du->enqueueTimeMs;

            // Also count the frame-to-frame delay if the decoder is delaying frames
            // until a subsequent frame is submitted.
            m_ActiveWndVideoStats.totalDecodeTime += (m_FramesIn - m_FramesOut) * (1000 / m_StreamFps);

            m_ActiveWndVideoStats.decodedFrames++;

            // Queue the frame for rendering (or render now if pacer is disabled)
            m_Pacer->submitFrame(frame);
            submittedFrame = true;

            // Once we receive a frame, transition out of the Unknown state by determining
            // whether a receive frame retry was needed to get this frame. We assume that
            // any asynchronous decoder is going to return EAGAIN on the first frame.
            if (m_CanRetryReceiveFrame == RRF_UNKNOWN) {
                SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "RRF mode: %s", receiveRetries > 0 ? "YES" : "NO");
                m_CanRetryReceiveFrame = receiveRetries > 0 ? RRF_YES : RRF_NO;
            }
        }
        else {
            // No frame available; release the AVFrame we allocated for it
            av_frame_free(&frame);

            if (err == AVERROR(EAGAIN)) {
                // Break out if we can't retry or we successfully received a frame. We only want
                // to retry if we haven't gotten a frame back for this input packet.
                if (m_CanRetryReceiveFrame == RRF_NO || receiveRetries == MAX_RECV_FRAME_RETRIES || submittedFrame) {
                    // We will transition from Unknown -> No if we exceed the maximum retries.
                    if (m_CanRetryReceiveFrame == RRF_UNKNOWN) {
                        SDL_assert(!submittedFrame);
                        SDL_assert(receiveRetries == MAX_RECV_FRAME_RETRIES);

                        SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION, "RRF mode: NO (timeout)");
                        m_CanRetryReceiveFrame = RRF_NO;
                    }

                    break;
                }
                else {
                    // Wait a moment for the asynchronous decoder to produce a frame
                    SDL_Delay(1);
                }
            }
        }
    } while (err == 0 || (err == AVERROR(EAGAIN) && receiveRetries++ < MAX_RECV_FRAME_RETRIES));

    // Treat this as a failed decode if we don't manage to receive a single frame or
    // if we finish the loop above with an error other than EAGAIN. Note that some
    // limited number of "failed decodes" with EAGAIN are expected for asynchronous
    // decoders, so we only reset the decoder if we get a ton of them in a row.
    if (!submittedFrame || err != AVERROR(EAGAIN)) {
        // Don't spam EAGAIN log messages for asynchronous decoders as long as
        // they produce a frame for at least every other submitted packet.
        if (m_ConsecutiveFailedDecodes > 0 || err != AVERROR(EAGAIN)) {
            char errorstring[512];
            av_strerror(err, errorstring, sizeof(errorstring));
            SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION,
                        "avcodec_receive_frame() failed: %s", errorstring);
        }

        // Same decoder-health reset logic as the send path above
        if (++m_ConsecutiveFailedDecodes == FAILED_DECODES_RESET_THRESHOLD) {
            SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                         "Resetting decoder due to consistent failure");

            SDL_Event event;
            event.type = SDL_RENDER_DEVICE_RESET;
            SDL_PushEvent(&event);
        }
    }

    return DR_OK;
}
|
|
|
|
|
2019-04-10 04:46:14 +00:00
|
|
|
// Called on the main/UI thread to give Pacer an opportunity to render any
// frame it has queued for main-thread rendering.
void FFmpegVideoDecoder::renderFrameOnMainThread()
{
    m_Pacer->renderOnMainThread();
}
|
|
|
|
|