diff --git a/app/app.pro b/app/app.pro index eb505875..c819f010 100644 --- a/app/app.pro +++ b/app/app.pro @@ -41,7 +41,7 @@ win32 { } macx { LIBS += -lssl -lcrypto -lSDL2 -lavcodec.58 -lavdevice.58 -lavformat.58 -lavutil.56 - LIBS += -lobjc -framework VideoToolbox -framework AVFoundation -framework CoreGraphics -framework CoreMedia -framework AppKit + LIBS += -lobjc -framework VideoToolbox -framework AVFoundation -framework CoreVideo -framework CoreGraphics -framework CoreMedia -framework AppKit } SOURCES += \ diff --git a/app/streaming/video/ffmpeg-renderers/dxva2.cpp b/app/streaming/video/ffmpeg-renderers/dxva2.cpp index d1408a44..a6e5de17 100644 --- a/app/streaming/video/ffmpeg-renderers/dxva2.cpp +++ b/app/streaming/video/ffmpeg-renderers/dxva2.cpp @@ -400,15 +400,18 @@ void DXVA2Renderer::renderFrame(AVFrame* frame) case D3DERR_DEVICELOST: SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "D3D device lost!"); + av_frame_free(&frame); return; case D3DERR_DEVICENOTRESET: SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "D3D device not reset!"); + av_frame_free(&frame); return; default: SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Unknown D3D error: %x", hr); + av_frame_free(&frame); return; } @@ -449,4 +452,6 @@ void DXVA2Renderer::renderFrame(AVFrame* frame) } m_Device->Present(nullptr, nullptr, nullptr, nullptr); + + av_frame_free(&frame); } diff --git a/app/streaming/video/ffmpeg-renderers/sdl.cpp b/app/streaming/video/ffmpeg-renderers/sdl.cpp index 89d05708..b0258ecb 100644 --- a/app/streaming/video/ffmpeg-renderers/sdl.cpp +++ b/app/streaming/video/ffmpeg-renderers/sdl.cpp @@ -65,6 +65,10 @@ void SdlRenderer::renderFrame(AVFrame* frame) frame->linesize[1], frame->data[2], frame->linesize[2]); + + // Done with the frame now + av_frame_free(&frame); + SDL_RenderClear(m_Renderer); SDL_RenderCopy(m_Renderer, m_Texture, nullptr, nullptr); SDL_RenderPresent(m_Renderer); diff --git a/app/streaming/video/ffmpeg-renderers/vt.mm b/app/streaming/video/ffmpeg-renderers/vt.mm index 
a8b3c388..d72b48af 100644 --- a/app/streaming/video/ffmpeg-renderers/vt.mm +++ b/app/streaming/video/ffmpeg-renderers/vt.mm @@ -7,9 +7,14 @@ #include #include +#include <QQueue> + #import <Cocoa/Cocoa.h> #import <VideoToolbox/VideoToolbox.h> #import <AVFoundation/AVFoundation.h> +#import <CoreVideo/CoreVideo.h> + +#define FRAME_HISTORY_ENTRIES 8 class VTRenderer : public IFFmpegRenderer { @@ -18,9 +23,10 @@ public: : m_HwContext(nullptr), m_DisplayLayer(nullptr), m_FormatDesc(nullptr), - m_View(nullptr) + m_View(nullptr), + m_DisplayLink(nullptr), + m_FrameQueueLock(0) { - } virtual ~VTRenderer() @@ -32,8 +38,120 @@ public: if (m_FormatDesc != nullptr) { CFRelease(m_FormatDesc); } + + if (m_DisplayLink != nullptr) { + CVDisplayLinkStop(m_DisplayLink); + CVDisplayLinkRelease(m_DisplayLink); + } + + while (!m_FrameQueue.isEmpty()) { + AVFrame* frame = m_FrameQueue.dequeue(); + av_frame_free(&frame); + } } + void drawFrame(uint64_t vsyncTime) + { + OSStatus status; + + SDL_AtomicLock(&m_FrameQueueLock); + + int frameDropTarget; + + // If the queue length history entries are large, be strict + // about dropping excess frames. 
+ frameDropTarget = 1; + for (int i = 0; i < m_FrameQueueHistory.count(); i++) { + if (m_FrameQueueHistory[i] <= 1) { + // Be lenient as long as the queue length + // resolves before the end of frame history + frameDropTarget = 3; + } + } + + if (m_FrameQueueHistory.count() == FRAME_HISTORY_ENTRIES) { + m_FrameQueueHistory.dequeue(); + } + + m_FrameQueueHistory.enqueue(m_FrameQueue.count()); + + // Catch up if we're several frames ahead + while (m_FrameQueue.count() > frameDropTarget) { + AVFrame* frame = m_FrameQueue.dequeue(); + av_frame_free(&frame); + } + + if (m_FrameQueue.isEmpty()) { + SDL_AtomicUnlock(&m_FrameQueueLock); + return; + } + + // Grab the first frame + AVFrame* frame = m_FrameQueue.dequeue(); + SDL_AtomicUnlock(&m_FrameQueueLock); + + CVPixelBufferRef pixBuf = reinterpret_cast<CVPixelBufferRef>(frame->data[3]); + + // If the format has changed or doesn't exist yet, construct it with the + // pixel buffer data + if (!m_FormatDesc || !CMVideoFormatDescriptionMatchesImageBuffer(m_FormatDesc, pixBuf)) { + if (m_FormatDesc != nullptr) { + CFRelease(m_FormatDesc); + } + status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, + pixBuf, &m_FormatDesc); + if (status != noErr) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "CMVideoFormatDescriptionCreateForImageBuffer() failed: %d", + status); + av_frame_free(&frame); + return; + } + } + + // Queue this sample for the next v-sync + CMSampleTimingInfo timingInfo = { + .duration = kCMTimeInvalid, + .decodeTimeStamp = kCMTimeInvalid, + .presentationTimeStamp = CMTimeMake(vsyncTime, 1000 * 1000 * 1000) + }; + + CMSampleBufferRef sampleBuffer; + status = CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, + pixBuf, + m_FormatDesc, + &timingInfo, + &sampleBuffer); + if (status != noErr) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "CMSampleBufferCreateReadyWithImageBuffer() failed: %d", + status); + av_frame_free(&frame); + return; + } + + [m_DisplayLayer enqueueSampleBuffer:sampleBuffer]; + + 
CFRelease(sampleBuffer); + av_frame_free(&frame); + } + + static + CVReturn + displayLinkOutputCallback( + CVDisplayLinkRef, + const CVTimeStamp*, + const CVTimeStamp* vsyncTime, + CVOptionFlags, + CVOptionFlags*, + void *displayLinkContext) + { + VTRenderer* me = reinterpret_cast<VTRenderer*>(displayLinkContext); + + me->drawFrame(vsyncTime->hostTime); + + return kCVReturnSuccess; + } virtual bool initialize(SDL_Window* window, int videoFormat, @@ -113,6 +231,10 @@ public: return false; } + CVDisplayLinkCreateWithActiveCGDisplays(&m_DisplayLink); + CVDisplayLinkSetOutputCallback(m_DisplayLink, displayLinkOutputCallback, this); + CVDisplayLinkStart(m_DisplayLink); + return true; } @@ -124,49 +246,15 @@ public: virtual void renderFrame(AVFrame* frame) override { - CVPixelBufferRef pixBuf = reinterpret_cast<CVPixelBufferRef>(frame->data[3]); - OSStatus status; - if (m_DisplayLayer.status == AVQueuedSampleBufferRenderingStatusFailed) { SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION, "Resetting failed AVSampleBufferDisplay layer"); setupDisplayLayer(); } - // If the format has changed or doesn't exist yet, construct it with the - // pixel buffer data - if (!m_FormatDesc || !CMVideoFormatDescriptionMatchesImageBuffer(m_FormatDesc, pixBuf)) { - status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, - pixBuf, &m_FormatDesc); - if (status != noErr) { - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "CMVideoFormatDescriptionCreateForImageBuffer() failed: %d", - status); - return; - } - } - - CMSampleBufferRef sampleBuffer; - status = CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, - pixBuf, - m_FormatDesc, - &kCMTimingInfoInvalid, - &sampleBuffer); - if (status != noErr) { - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "CMSampleBufferCreateReadyWithImageBuffer() failed: %d", - status); - return; - } - - CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true); - CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 
0); - - CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue); - - [m_DisplayLayer enqueueSampleBuffer:sampleBuffer]; - - CFRelease(sampleBuffer); + SDL_AtomicLock(&m_FrameQueueLock); + m_FrameQueue.enqueue(frame); + SDL_AtomicUnlock(&m_FrameQueueLock); } private: @@ -192,6 +280,10 @@ private: AVSampleBufferDisplayLayer* m_DisplayLayer; CMVideoFormatDescriptionRef m_FormatDesc; NSView* m_View; + CVDisplayLinkRef m_DisplayLink; + QQueue<AVFrame*> m_FrameQueue; + QQueue<int> m_FrameQueueHistory; + SDL_SpinLock m_FrameQueueLock; }; IFFmpegRenderer* VTRendererFactory::createRenderer() { diff --git a/app/streaming/video/ffmpeg.cpp b/app/streaming/video/ffmpeg.cpp index 7fabcaf8..b5ee99d3 100644 --- a/app/streaming/video/ffmpeg.cpp +++ b/app/streaming/video/ffmpeg.cpp @@ -232,7 +232,6 @@ void FFmpegVideoDecoder::renderFrame(SDL_UserEvent* event) { AVFrame* frame = reinterpret_cast<AVFrame*>(event->data1); m_Renderer->renderFrame(frame); - av_frame_free(&frame); } // Called on main thread