Support high-resolution stats; add audio stats framework

NOTE: this patch depends on a patch to moonlight-common-c, see [this PR](https://github.com/moonlight-stream/moonlight-common-c/pull/95).

* Adds an audio stats overlay that works with all current renderers, showing common info such as
  bitrate and packet loss. It is rendered in blue in the upper-right corner and appears whenever
  the video overlay is enabled.
* Audio renderers can append their own lines to the overlay (the upcoming CoreAudio patch uses
  this); see the stringifyAudioStats() sketch later in the diff.
* Added bitrate/FEC display to both video and audio stats.
* Consolidated the three FPS lines into one to save a bit of space.
* All time-based stats are now measured in microseconds, improving accuracy for very fast events
  (see the timing sketch below).
Andy Grundman 2024-09-13 13:56:10 -04:00
parent f786e94c7b
commit aa3e51d30c
29 changed files with 379 additions and 71 deletions
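To illustrate the microsecond-based timing: durations are accumulated as uint64_t microsecond counts via LiGetMicroseconds() (added by the linked moonlight-common-c PR) and converted to milliseconds only for display. A minimal sketch using names from this patch; doWork() is a hypothetical stand-in for the timed pipeline stage:

    uint64_t startTimeUs = LiGetMicroseconds();
    doWork(); // hypothetical: decode and render one audio sample
    m_ActiveWndAudioStats.decodeDurationUs += LiGetMicroseconds() - startTimeUs;

    // Converting to ms only at display time preserves sub-millisecond precision
    double avgDecodeMs = stats.decodedPackets
        ? (stats.decodeDurationUs / 1000.0) / stats.decodedPackets
        : 0.0;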

.gitignore

@@ -2,6 +2,7 @@
**/.vs/
.vscode/
build/
config.tests/*/.qmake.stash
config.tests/*/Makefile


@@ -196,6 +196,7 @@ SOURCES += \
streaming/input/reltouch.cpp \
streaming/session.cpp \
streaming/audio/audio.cpp \
streaming/audio/renderers/renderer.cpp \
streaming/audio/renderers/sdlaud.cpp \
gui/computermodel.cpp \
gui/appmodel.cpp \


@@ -157,6 +157,8 @@ int Session::arInit(int /* audioConfiguration */,
void Session::arCleanup()
{
s_ActiveSession->m_AudioRenderer->logGlobalAudioStats();
delete s_ActiveSession->m_AudioRenderer;
s_ActiveSession->m_AudioRenderer = nullptr;
@@ -205,6 +207,8 @@ void Session::arDecodeAndPlaySample(char* sampleData, int sampleLength)
}
if (s_ActiveSession->m_AudioRenderer != nullptr) {
uint64_t startTimeUs = LiGetMicroseconds();
int sampleSize = s_ActiveSession->m_AudioRenderer->getAudioBufferSampleSize();
int frameSize = sampleSize * s_ActiveSession->m_ActiveAudioConfig.channelCount;
int desiredBufferSize = frameSize * s_ActiveSession->m_ActiveAudioConfig.samplesPerFrame;
@@ -239,6 +243,24 @@ void Session::arDecodeAndPlaySample(char* sampleData, int sampleLength)
desiredBufferSize = 0;
}
// Used to display the raw audio bitrate
s_ActiveSession->m_AudioRenderer->statsAddOpusBytesReceived(sampleLength);
// Once a second: if the overlay is enabled, snapshot stats from the last two windows for display, then shift to the next stats window
if (LiGetMicroseconds() > s_ActiveSession->m_AudioRenderer->getActiveWndAudioStats().measurementStartUs + 1000000) {
if (s_ActiveSession->getOverlayManager().isOverlayEnabled(Overlay::OverlayDebugAudio)) {
AUDIO_STATS lastTwoWndAudioStats = {};
s_ActiveSession->m_AudioRenderer->snapshotAudioStats(lastTwoWndAudioStats);
s_ActiveSession->m_AudioRenderer->stringifyAudioStats(lastTwoWndAudioStats,
s_ActiveSession->getOverlayManager().getOverlayText(Overlay::OverlayDebugAudio),
s_ActiveSession->getOverlayManager().getOverlayMaxTextLength());
s_ActiveSession->getOverlayManager().setOverlayTextUpdated(Overlay::OverlayDebugAudio);
}
s_ActiveSession->m_AudioRenderer->flipAudioStatsWindows();
}
if (!s_ActiveSession->m_AudioRenderer->submitAudio(desiredBufferSize)) {
SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION,
"Reinitializing audio renderer after failure");
@@ -249,6 +271,9 @@ void Session::arDecodeAndPlaySample(char* sampleData, int sampleLength)
delete s_ActiveSession->m_AudioRenderer;
s_ActiveSession->m_AudioRenderer = nullptr;
}
// Keep stats on how long the audio pipeline took to execute
s_ActiveSession->m_AudioRenderer->statsTrackDecodeTime(startTimeUs);
}
// Only try to recreate the audio renderer every 200 samples (1 second)


@@ -0,0 +1,158 @@
#include "renderer.h"
#include <Limelight.h>
IAudioRenderer::IAudioRenderer()
{
SDL_zero(m_ActiveWndAudioStats);
SDL_zero(m_LastWndAudioStats);
SDL_zero(m_GlobalAudioStats);
m_ActiveWndAudioStats.measurementStartUs = LiGetMicroseconds();
}
int IAudioRenderer::getAudioBufferSampleSize()
{
switch (getAudioBufferFormat()) {
case IAudioRenderer::AudioFormat::Sint16NE:
return sizeof(short);
case IAudioRenderer::AudioFormat::Float32NE:
return sizeof(float);
default:
Q_UNREACHABLE();
}
}
void IAudioRenderer::addAudioStats(AUDIO_STATS& src, AUDIO_STATS& dst)
{
dst.opusBytesReceived += src.opusBytesReceived;
dst.decodedPackets += src.decodedPackets;
dst.renderedPackets += src.renderedPackets;
dst.droppedNetwork += src.droppedNetwork;
dst.droppedOverload += src.droppedOverload;
dst.decodeDurationUs += src.decodeDurationUs;
if (!LiGetEstimatedRttInfo(&dst.lastRtt, NULL)) {
dst.lastRtt = 0;
}
else {
// Our logic to determine if RTT is valid depends on us never
// getting an RTT of 0. ENet currently ensures RTTs are >= 1.
SDL_assert(dst.lastRtt > 0);
}
// Initialize the measurement start point if this is the first audio stats window
if (!dst.measurementStartUs) {
dst.measurementStartUs = src.measurementStartUs;
}
// The following code assumes the global measure was already started first
SDL_assert(dst.measurementStartUs <= src.measurementStartUs);
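// Bitrate over this window: bytes -> kilobits (x8, /1000), divided by elapsed seconds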
double timeDiffSecs = (double)(LiGetMicroseconds() - dst.measurementStartUs) / 1000000.0;
dst.opusKbitsPerSec = (double)(dst.opusBytesReceived * 8) / 1000.0 / timeDiffSecs;
}
void IAudioRenderer::flipAudioStatsWindows()
{
// Called once a second, adds stats to the running global total,
// copies the active window to the last window, and initializes
// a fresh active window.
// Accumulate these values into the global stats
addAudioStats(m_ActiveWndAudioStats, m_GlobalAudioStats);
// Move this window into the last window slot and clear it for next window
SDL_memcpy(&m_LastWndAudioStats, &m_ActiveWndAudioStats, sizeof(m_ActiveWndAudioStats));
SDL_zero(m_ActiveWndAudioStats);
m_ActiveWndAudioStats.measurementStartUs = LiGetMicroseconds();
}
void IAudioRenderer::logGlobalAudioStats()
{
if (m_GlobalAudioStats.decodedPackets > 0) {
char audioStatsStr[1024];
stringifyAudioStats(m_GlobalAudioStats, audioStatsStr, sizeof(audioStatsStr));
SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
"\nCurrent session audio stats\n---------------------------\n%s",
audioStatsStr);
}
}
void IAudioRenderer::snapshotAudioStats(AUDIO_STATS &snapshot)
{
addAudioStats(m_LastWndAudioStats, snapshot);
addAudioStats(m_ActiveWndAudioStats, snapshot);
}
void IAudioRenderer::statsAddOpusBytesReceived(int size)
{
m_ActiveWndAudioStats.opusBytesReceived += size;
if (size) {
m_ActiveWndAudioStats.decodedPackets++;
}
else {
// A size of 0 indicates a packet presumed lost by the network
m_ActiveWndAudioStats.droppedNetwork++;
}
}
void IAudioRenderer::statsTrackDecodeTime(uint64_t startTimeUs)
{
uint64_t decodeTimeUs = LiGetMicroseconds() - startTimeUs;
m_ActiveWndAudioStats.decodeDurationUs += decodeTimeUs;
}
// Provide audio stats common to all renderer backends. Child classes can then add additional
// lines at the returned offset into the output buffer (see the sketch after this function).
int IAudioRenderer::stringifyAudioStats(AUDIO_STATS& stats, char *output, int length)
{
int offset = 0;
// Start with an empty string
output[offset] = 0;
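// Opus frame duration in ms: samples per frame / 48 samples per ms (48 kHz)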
double opusFrameSize = (double)m_opusConfig->samplesPerFrame / 48.0;
PRTP_AUDIO_STATS rtpAudioStats = LiGetRTPAudioStats();
uint32_t totalAudioPackets = rtpAudioStats->packetCountAudio + rtpAudioStats->packetCountFec;
double fecOverhead = totalAudioPackets ? (double)rtpAudioStats->packetCountFec / totalAudioPackets : 0.0;
int ret = snprintf(
&output[offset],
length - offset,
"Audio stream: %s-channel Opus low-delay @ 48 kHz (%s)\n"
"Bitrate: %.1f kbps, +%.0f%% forward error-correction\n"
"Opus config: %s, frame size: %.1f ms\n"
"Packet loss from network: %.2f%%, loss from CPU overload: %.2f%%\n"
"Average decoding time: %0.2f ms\n",
// "Audio stream: %s-channel Opus low-delay @ 48 kHz (%s)\n"
m_opusConfig->channelCount == 6 ? "5.1" : m_opusConfig->channelCount == 8 ? "7.1" : "2",
getRendererName(),
// "Bitrate: %.1f %s, +%.0f%% forward error-correction\n"
stats.opusKbitsPerSec,
fecOverhead * 100.0,
// "Opus config: %s, frame size: %.1fms\n"
// Work out if we're getting high or low quality from Sunshine. coupled surround is designed for physical speakers
((m_opusConfig->channelCount == 2 && stats.opusKbitsPerSec > 128) || !m_opusConfig->coupledStreams)
? "high quality (LAN)" // 512k stereo coupled, 1.5mbps 5.1 uncoupled, 2mbps 7.1 uncoupled
: "normal quality", // 96k stereo coupled, 256k 5.1 coupled, 450k 7.1 coupled
opusFrameSize,
// "Packet loss from network: %.2f%%, loss from CPU overload: %.2f%%\n"
stats.decodedPackets ? ((double)stats.droppedNetwork / stats.decodedPackets) * 100.0 : 0.0,
stats.decodedPackets ? ((double)stats.droppedOverload / stats.decodedPackets) * 100.0 : 0.0,
// "Average decoding time: %0.2f ms\n"
stats.decodedPackets ? (stats.decodeDurationUs / 1000.0) / stats.decodedPackets : 0.0
);
if (ret < 0 || ret >= length - offset) {
SDL_assert(false);
return -1;
}
return offset + ret;
}
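As referenced in the commit summary, child classes extend this output by writing at the returned offset. A minimal sketch of a hypothetical subclass (illustrative only; the other required overrides are omitted, and the real CoreAudio renderer may differ):

    // Hypothetical renderer, shown only to illustrate the extension point
    class ExampleAudioRenderer : public IAudioRenderer
    {
    public:
        virtual const char* getRendererName() { return "Example"; }

        virtual int stringifyAudioStats(AUDIO_STATS& stats, char* output, int length)
        {
            // The base class writes the common stats and returns where it stopped
            int offset = IAudioRenderer::stringifyAudioStats(stats, output, length);
            if (offset < 0) {
                return offset;
            }
            // Append renderer-specific lines at that offset
            int ret = snprintf(&output[offset], length - offset,
                               "Packets rendered: %u\n", stats.renderedPackets);
            if (ret < 0 || ret >= length - offset) {
                return -1;
            }
            return offset + ret;
        }
    };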


@@ -2,14 +2,37 @@
#include <Limelight.h>
#include <QtGlobal>
#include <SDL.h>
typedef struct _AUDIO_STATS {
uint32_t opusBytesReceived;
uint32_t decodedPackets; // total packets decoded (renderedPackets falling short of this indicates droppedOverload)
uint32_t renderedPackets; // total audio packets rendered (only for certain backends)
uint32_t droppedNetwork; // total packets lost to the network
uint32_t droppedOverload; // total times we dropped a packet due to being unable to run in time
uint32_t totalGlitches; // total times the audio was interrupted
uint64_t decodeDurationUs; // cumulative decode time, microseconds
uint64_t decodeDurationUsMax; // slowest decode time, microseconds
uint32_t lastRtt; // network latency from ENet, milliseconds
uint64_t measurementStartUs; // timestamp stats were started, microseconds
double opusKbitsPerSec; // current Opus bitrate in kbps, not including FEC overhead
} AUDIO_STATS, *PAUDIO_STATS;
class IAudioRenderer
{
public:
IAudioRenderer();
virtual ~IAudioRenderer() {}
virtual bool prepareForPlayback(const OPUS_MULTISTREAM_CONFIGURATION* opusConfig) = 0;
virtual void setOpusConfig(const OPUS_MULTISTREAM_CONFIGURATION* opusConfig) {
m_opusConfig = opusConfig;
}
virtual void* getAudioBuffer(int* size) = 0;
// Return false if an unrecoverable error has occurred and the renderer must be reinitialized
@@ -33,14 +56,28 @@ public:
};
virtual AudioFormat getAudioBufferFormat() = 0;
int getAudioBufferSampleSize() {
switch (getAudioBufferFormat()) {
case IAudioRenderer::AudioFormat::Sint16NE:
return sizeof(short);
case IAudioRenderer::AudioFormat::Float32NE:
return sizeof(float);
default:
Q_UNREACHABLE();
}
virtual int getAudioBufferSampleSize();
AUDIO_STATS & getActiveWndAudioStats() {
return m_ActiveWndAudioStats;
}
virtual const char * getRendererName() { return "IAudioRenderer"; };
// generic stats handling for all child classes
virtual void addAudioStats(AUDIO_STATS &, AUDIO_STATS &);
virtual void flipAudioStatsWindows();
virtual void logGlobalAudioStats();
virtual void snapshotAudioStats(AUDIO_STATS &);
virtual void statsAddOpusBytesReceived(int);
virtual void statsTrackDecodeTime(uint64_t);
virtual int stringifyAudioStats(AUDIO_STATS &, char *, int);
protected:
AUDIO_STATS m_ActiveWndAudioStats;
AUDIO_STATS m_LastWndAudioStats;
AUDIO_STATS m_GlobalAudioStats;
// input stream metadata
const OPUS_MULTISTREAM_CONFIGURATION* m_opusConfig;
};


@@ -20,8 +20,11 @@ public:
virtual AudioFormat getAudioBufferFormat();
const char * getRendererName() { return m_Name; }
private:
SDL_AudioDeviceID m_AudioDevice;
void* m_AudioBuffer;
int m_FrameSize;
char m_Name[24];
};


@@ -5,7 +5,8 @@
SdlAudioRenderer::SdlAudioRenderer()
: m_AudioDevice(0),
m_AudioBuffer(nullptr)
m_AudioBuffer(nullptr),
m_Name("SDL")
{
SDL_assert(!SDL_WasInit(SDL_INIT_AUDIO));
@@ -59,6 +60,8 @@ bool SdlAudioRenderer::prepareForPlayback(const OPUS_MULTISTREAM_CONFIGURATION*
return false;
}
setOpusConfig(opusConfig);
SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
"Desired audio buffer: %u samples (%u bytes)",
want.samples,
@@ -69,9 +72,10 @@ bool SdlAudioRenderer::prepareForPlayback(const OPUS_MULTISTREAM_CONFIGURATION*
have.samples,
have.size);
const char *driver = SDL_GetCurrentAudioDriver();
snprintf(m_Name, sizeof(m_Name), "SDL/%s", driver);
SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
"SDL audio driver: %s",
SDL_GetCurrentAudioDriver());
"SDL audio driver: %s", driver);
// Start playback
SDL_PauseAudioDevice(m_AudioDevice, 0);
@@ -110,6 +114,8 @@ bool SdlAudioRenderer::submitAudio(int bytesWritten)
// Don't queue if there's already more than 30 ms of audio data waiting
// in Moonlight's audio queue.
if (LiGetPendingAudioDuration() > 30) {
m_ActiveWndAudioStats.totalGlitches++;
m_ActiveWndAudioStats.droppedOverload++;
return true;
}


@@ -19,6 +19,8 @@ bool SLAudioRenderer::prepareForPlayback(const OPUS_MULTISTREAM_CONFIGURATION* o
return false;
}
setOpusConfig(opusConfig);
// This number is pretty conservative (especially for surround), but
// it's hard to avoid since we get crushed by CPU limitations.
m_MaxQueuedAudioMs = 40 * opusConfig->channelCount / 2;
@@ -109,6 +111,8 @@ bool SLAudioRenderer::submitAudio(int bytesWritten)
SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
"Too many queued audio frames: %d",
LiGetPendingAudioFrames());
m_ActiveWndAudioStats.totalGlitches++;
m_ActiveWndAudioStats.droppedOverload++;
}
return true;


@@ -20,6 +20,8 @@ public:
virtual AudioFormat getAudioBufferFormat();
const char * getRendererName() { return "Steam Link"; }
virtual void remapChannels(POPUS_MULTISTREAM_CONFIGURATION opusConfig);
private:


@@ -12,7 +12,8 @@ SoundIoAudioRenderer::SoundIoAudioRenderer()
m_RingBuffer(nullptr),
m_AudioPacketDuration(0),
m_Latency(0),
m_Errored(false)
m_Errored(false),
m_Name("libsoundio")
{
}
@@ -109,6 +110,8 @@ bool SoundIoAudioRenderer::prepareForPlayback(const OPUS_MULTISTREAM_CONFIGURATI
return false;
}
setOpusConfig(opusConfig);
m_SoundIo->app_name = "Moonlight";
m_SoundIo->userdata = this;
m_SoundIo->on_backend_disconnect = sioBackendDisconnect;
@@ -123,7 +126,7 @@ bool SoundIoAudioRenderer::prepareForPlayback(const OPUS_MULTISTREAM_CONFIGURATI
}
SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
"Audio backend: %s",
"Audio backend: soundio using %s",
soundio_backend_name(m_SoundIo->current_backend));
// Don't continue if we could only open the dummy backend


@@ -21,6 +21,14 @@ public:
virtual AudioFormat getAudioBufferFormat();
const char * getRendererName() {
if (m_SoundIo != nullptr) {
const char *backend = soundio_backend_name(m_SoundIo->current_backend);
snprintf(m_Name, sizeof(m_Name), "libsoundio/%s", backend);
}
return m_Name;
}
private:
int scoreChannelLayout(const struct SoundIoChannelLayout* layout, const OPUS_MULTISTREAM_CONFIGURATION* opusConfig);
@@ -41,4 +49,5 @@ private:
double m_AudioPacketDuration;
double m_Latency;
bool m_Errored;
char m_Name[24];
};


@@ -388,6 +388,8 @@ void SdlInputHandler::handleControllerButtonEvent(SDL_ControllerButtonEvent* eve
// Toggle the stats overlay
Session::get()->getOverlayManager().setOverlayState(Overlay::OverlayDebug,
!Session::get()->getOverlayManager().isOverlayEnabled(Overlay::OverlayDebug));
Session::get()->getOverlayManager().setOverlayState(Overlay::OverlayDebugAudio,
!Session::get()->getOverlayManager().isOverlayEnabled(Overlay::OverlayDebugAudio));
// Clear buttons down on this gamepad
LiSendMultiControllerEvent(state->index, m_GamepadMask,


@@ -56,6 +56,8 @@ void SdlInputHandler::performSpecialKeyCombo(KeyCombo combo)
// Toggle the stats overlay
Session::get()->getOverlayManager().setOverlayState(Overlay::OverlayDebug,
!Session::get()->getOverlayManager().isOverlayEnabled(Overlay::OverlayDebug));
Session::get()->getOverlayManager().setOverlayState(Overlay::OverlayDebugAudio,
!Session::get()->getOverlayManager().isOverlayEnabled(Overlay::OverlayDebugAudio));
break;
case KeyComboToggleMouseMode:


@@ -1960,6 +1960,7 @@ void Session::execInternal()
// Toggle the stats overlay if requested by the user
m_OverlayManager.setOverlayState(Overlay::OverlayDebug, m_Preferences->showPerformanceOverlay);
m_OverlayManager.setOverlayState(Overlay::OverlayDebugAudio, m_Preferences->showPerformanceOverlay);
// Hijack this thread to be the SDL main thread. We have to do this
// because we want to suspend all Qt processing until the stream is over.


@@ -200,7 +200,7 @@ bool StreamUtils::getNativeDesktopMode(int displayIndex, SDL_DisplayMode* mode,
CGDirectDisplayID displayIds[MAX_DISPLAYS];
uint32_t displayCount = 0;
CGGetActiveDisplayList(MAX_DISPLAYS, displayIds, &displayCount);
if (displayIndex >= displayCount) {
if (displayIndex >= (int)displayCount) {
return false;
}


@@ -9,27 +9,29 @@
#define MAX_SLICES 4
typedef struct _VIDEO_STATS {
uint64_t receivedVideoBytes;
uint32_t receivedFrames;
uint32_t decodedFrames;
uint32_t renderedFrames;
uint32_t totalFrames;
uint32_t networkDroppedFrames;
uint32_t pacerDroppedFrames;
uint16_t minHostProcessingLatency;
uint16_t maxHostProcessingLatency;
uint32_t totalHostProcessingLatency;
uint32_t framesWithHostProcessingLatency;
uint32_t totalReassemblyTime;
uint32_t totalDecodeTime;
uint32_t totalPacerTime;
uint32_t totalRenderTime;
uint32_t lastRtt;
uint32_t lastRttVariance;
float totalFps;
float receivedFps;
float decodedFps;
float renderedFps;
uint32_t measurementStartTimestamp;
uint16_t minHostProcessingLatency; // low-res from RTP
uint16_t maxHostProcessingLatency; // low-res from RTP
uint32_t totalHostProcessingLatency; // low-res from RTP
uint32_t framesWithHostProcessingLatency; // low-res from RTP
uint64_t totalReassemblyTimeUs; // high-res (1us)
uint64_t totalDecodeTimeUs; // high-res from moonlight-common-c (1us)
uint64_t totalPacerTimeUs; // high-res (1us)
uint64_t totalRenderTimeUs; // high-res (1us)
uint32_t lastRtt; // low-res from enet (1ms)
uint32_t lastRttVariance; // low-res from enet (1ms)
double totalFps; // high-res
double receivedFps; // high-res
double decodedFps; // high-res
double renderedFps; // high-res
double videoMegabitsPerSec; // current video bitrate in Mbps, not including FEC overhead
uint64_t measurementStartUs; // microseconds
} VIDEO_STATS, *PVIDEO_STATS;
typedef struct _DECODER_PARAMETERS {


@@ -967,6 +967,11 @@ void D3D11VARenderer::notifyOverlayUpdated(Overlay::OverlayType type)
renderRect.x = 0;
renderRect.y = m_DisplayHeight - newSurface->h;
}
else if (type == Overlay::OverlayDebugAudio) {
// Top right
renderRect.x = m_DisplayWidth - newSurface->w;
renderRect.y = m_DisplayHeight - newSurface->h;
}
renderRect.w = newSurface->w;
renderRect.h = newSurface->h;


@@ -866,6 +866,11 @@ void DXVA2Renderer::notifyOverlayUpdated(Overlay::OverlayType type)
renderRect.x = 0;
renderRect.y = 0;
}
else if (type == Overlay::OverlayDebugAudio) {
// Top right
renderRect.x = m_DisplayWidth - newSurface->w;
renderRect.y = 0;
}
renderRect.w = newSurface->w;
renderRect.h = newSurface->h;


@@ -241,6 +241,11 @@ void EGLRenderer::renderOverlay(Overlay::OverlayType type, int viewportWidth, in
// Top left
overlayRect.x = 0;
overlayRect.y = viewportHeight - newSurface->h;
}
else if (type == Overlay::OverlayDebugAudio) {
// Top right
overlayRect.x = viewportWidth - newSurface->w;
overlayRect.y = viewportHeight - newSurface->h;
} else {
SDL_assert(false);
}


@@ -333,14 +333,14 @@ void Pacer::signalVsync()
void Pacer::renderFrame(AVFrame* frame)
{
// Count time spent in Pacer's queues
Uint32 beforeRender = SDL_GetTicks();
m_VideoStats->totalPacerTime += beforeRender - frame->pkt_dts;
uint64_t beforeRender = LiGetMicroseconds();
m_VideoStats->totalPacerTimeUs += (beforeRender - (uint64_t)frame->pkt_dts);
// Render it
m_VsyncRenderer->renderFrame(frame);
Uint32 afterRender = SDL_GetTicks();
uint64_t afterRender = LiGetMicroseconds();
m_VideoStats->totalRenderTime += afterRender - beforeRender;
m_VideoStats->totalRenderTimeUs += (afterRender - beforeRender);
m_VideoStats->renderedFrames++;
av_frame_free(&frame);


@@ -770,6 +770,11 @@ void PlVkRenderer::renderFrame(AVFrame *frame)
overlayParts[i].dst.x0 = 0;
overlayParts[i].dst.y0 = 0;
}
else if (i == Overlay::OverlayDebugAudio) {
// Top right
overlayParts[i].dst.x0 = SDL_max(0, targetFrame.crop.x1 - overlayParts[i].src.x1);
overlayParts[i].dst.y0 = 0;
}
overlayParts[i].dst.x1 = overlayParts[i].dst.x0 + overlayParts[i].src.x1;
overlayParts[i].dst.y1 = overlayParts[i].dst.y0 + overlayParts[i].src.y1;


@@ -229,10 +229,11 @@ void SdlRenderer::renderOverlay(Overlay::OverlayType type)
SDL_DestroyTexture(m_OverlayTextures[type]);
}
SDL_Rect viewportRect;
SDL_RenderGetViewport(m_Renderer, &viewportRect);
if (type == Overlay::OverlayStatusUpdate) {
// Bottom Left
SDL_Rect viewportRect;
SDL_RenderGetViewport(m_Renderer, &viewportRect);
m_OverlayRects[type].x = 0;
m_OverlayRects[type].y = viewportRect.h - newSurface->h;
}
@@ -241,6 +242,11 @@ void SdlRenderer::renderOverlay(Overlay::OverlayType type)
m_OverlayRects[type].x = 0;
m_OverlayRects[type].y = 0;
}
else if (type == Overlay::OverlayDebugAudio) {
// Top right
m_OverlayRects[type].x = viewportRect.w - newSurface->w;
m_OverlayRects[type].y = 0;
}
m_OverlayRects[type].w = newSurface->w;
m_OverlayRects[type].h = newSurface->h;


@@ -703,6 +703,11 @@ void VAAPIRenderer::notifyOverlayUpdated(Overlay::OverlayType type)
overlayRect.x = 0;
overlayRect.y = 0;
}
else if (type == Overlay::OverlayDebugAudio) {
// Top right
overlayRect.x = -newSurface->w;
overlayRect.y = 0;
}
overlayRect.w = newSurface->w;
overlayRect.h = newSurface->h;


@@ -435,6 +435,11 @@ void VDPAURenderer::notifyOverlayUpdated(Overlay::OverlayType type)
overlayRect.x0 = 0;
overlayRect.y0 = 0;
}
else if (type == Overlay::OverlayDebugAudio) {
// Top right
overlayRect.x0 = m_DisplayWidth - newSurface->w;
overlayRect.y0 = 0;
}
overlayRect.x1 = overlayRect.x0 + newSurface->w;
overlayRect.y1 = overlayRect.y0 + newSurface->h;


@@ -497,6 +497,9 @@ public:
case Overlay::OverlayDebug:
[m_OverlayTextFields[type] setAlignment:NSTextAlignmentLeft];
break;
case Overlay::OverlayDebugAudio:
[m_OverlayTextFields[type] setAlignment:NSTextAlignmentRight];
break;
case Overlay::OverlayStatusUpdate:
[m_OverlayTextFields[type] setAlignment:NSTextAlignmentRight];
break;


@@ -603,6 +603,11 @@ public:
renderRect.x = 0;
renderRect.y = m_LastDrawableHeight - overlayTexture.height;
}
else if (i == Overlay::OverlayDebugAudio) {
// Top right
renderRect.x = m_LastDrawableWidth - overlayTexture.width;
renderRect.y = m_LastDrawableHeight - overlayTexture.height;
}
renderRect.w = overlayTexture.width;
renderRect.h = overlayTexture.height;


@@ -650,16 +650,17 @@ bool FFmpegVideoDecoder::completeInitialization(const AVCodec* decoder, enum AVP
void FFmpegVideoDecoder::addVideoStats(VIDEO_STATS& src, VIDEO_STATS& dst)
{
dst.receivedVideoBytes += src.receivedVideoBytes;
dst.receivedFrames += src.receivedFrames;
dst.decodedFrames += src.decodedFrames;
dst.renderedFrames += src.renderedFrames;
dst.totalFrames += src.totalFrames;
dst.networkDroppedFrames += src.networkDroppedFrames;
dst.pacerDroppedFrames += src.pacerDroppedFrames;
dst.totalReassemblyTime += src.totalReassemblyTime;
dst.totalDecodeTime += src.totalDecodeTime;
dst.totalPacerTime += src.totalPacerTime;
dst.totalRenderTime += src.totalRenderTime;
dst.totalReassemblyTimeUs += src.totalReassemblyTimeUs;
dst.totalDecodeTimeUs += src.totalDecodeTimeUs;
dst.totalPacerTimeUs += src.totalPacerTimeUs;
dst.totalRenderTimeUs += src.totalRenderTimeUs;
if (dst.minHostProcessingLatency == 0) {
dst.minHostProcessingLatency = src.minHostProcessingLatency;
@@ -681,20 +682,20 @@ void FFmpegVideoDecoder::addVideoStats(VIDEO_STATS& src, VIDEO_STATS& dst)
SDL_assert(dst.lastRtt > 0);
}
Uint32 now = SDL_GetTicks();
// Initialize the measurement start point if this is the first video stat window
if (!dst.measurementStartTimestamp) {
dst.measurementStartTimestamp = src.measurementStartTimestamp;
if (!dst.measurementStartUs) {
dst.measurementStartUs = src.measurementStartUs;
}
// The following code assumes the global measure was already started first
SDL_assert(dst.measurementStartTimestamp <= src.measurementStartTimestamp);
SDL_assert(dst.measurementStartUs <= src.measurementStartUs);
dst.totalFps = (float)dst.totalFrames / ((float)(now - dst.measurementStartTimestamp) / 1000);
dst.receivedFps = (float)dst.receivedFrames / ((float)(now - dst.measurementStartTimestamp) / 1000);
dst.decodedFps = (float)dst.decodedFrames / ((float)(now - dst.measurementStartTimestamp) / 1000);
dst.renderedFps = (float)dst.renderedFrames / ((float)(now - dst.measurementStartTimestamp) / 1000);
double timeDiffSecs = (double)(LiGetMicroseconds() - dst.measurementStartUs) / 1000000.0;
dst.totalFps = (double)dst.totalFrames / timeDiffSecs;
dst.receivedFps = (double)dst.receivedFrames / timeDiffSecs;
dst.decodedFps = (double)dst.decodedFrames / timeDiffSecs;
dst.renderedFps = (double)dst.renderedFrames / timeDiffSecs;
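// Bitrate over this window: bytes -> megabits (x8, /1e6), divided by elapsed seconds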
dst.videoMegabitsPerSec = (double)(dst.receivedVideoBytes * 8) / 1000000.0 / timeDiffSecs;
}
void FFmpegVideoDecoder::stringifyVideoStats(VIDEO_STATS& stats, char* output, int length)
@@ -776,13 +777,21 @@ void FFmpegVideoDecoder::stringifyVideoStats(VIDEO_STATS& stats, char* output, i
if (stats.receivedFps > 0) {
if (m_VideoDecoderCtx != nullptr) {
PRTP_VIDEO_STATS rtpVideoStats = LiGetRTPVideoStats();
uint32_t totalVideoPackets = rtpVideoStats->packetCountVideo + rtpVideoStats->packetCountFec;
float fecOverhead = totalVideoPackets ? (float)rtpVideoStats->packetCountFec / totalVideoPackets : 0.0f;
bool useKb = stats.videoMegabitsPerSec < 1;
ret = snprintf(&output[offset],
length - offset,
"Video stream: %dx%d %.2f FPS (Codec: %s)\n",
"Video stream: %dx%d %.2f FPS (Codec: %s)\n"
"Bitrate: %.1f %s, +%.0f%% forward error-correction\n",
m_VideoDecoderCtx->width,
m_VideoDecoderCtx->height,
stats.totalFps,
codecString);
codecString,
useKb ? stats.videoMegabitsPerSec * 1000 : stats.videoMegabitsPerSec,
useKb ? "kbps" : "Mbps",
fecOverhead * 100.0);
if (ret < 0 || ret >= length - offset) {
SDL_assert(false);
return;
@@ -793,12 +802,8 @@ void FFmpegVideoDecoder::stringifyVideoStats(VIDEO_STATS& stats, char* output, i
ret = snprintf(&output[offset],
length - offset,
"Incoming frame rate from network: %.2f FPS\n"
"Decoding frame rate: %.2f FPS\n"
"Rendering frame rate: %.2f FPS\n",
stats.receivedFps,
stats.decodedFps,
stats.renderedFps);
"FPS incoming/decoding/rendering: %.2f/%.2f/%.2f\n",
stats.receivedFps, stats.decodedFps, stats.renderedFps);
if (ret < 0 || ret >= length - offset) {
SDL_assert(false);
return;
@@ -843,9 +848,9 @@ void FFmpegVideoDecoder::stringifyVideoStats(VIDEO_STATS& stats, char* output, i
(float)stats.networkDroppedFrames / stats.totalFrames * 100,
(float)stats.pacerDroppedFrames / stats.decodedFrames * 100,
rttString,
(float)stats.totalDecodeTime / stats.decodedFrames,
(float)stats.totalPacerTime / stats.renderedFrames,
(float)stats.totalRenderTime / stats.renderedFrames);
(stats.totalDecodeTimeUs / 1000.0) / stats.decodedFrames,
(stats.totalPacerTimeUs / 1000.0) / stats.renderedFrames,
(stats.totalRenderTimeUs / 1000.0) / stats.renderedFrames);
if (ret < 0 || ret >= length - offset) {
SDL_assert(false);
return;
@@ -862,10 +867,8 @@ void FFmpegVideoDecoder::logVideoStats(VIDEO_STATS& stats, const char* title)
stringifyVideoStats(stats, videoStatsStr, sizeof(videoStatsStr));
SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
"%s", title);
SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
"----------------------------------------------------------\n%s",
videoStatsStr);
"\n%s\n------------------\n%s",
title, videoStatsStr);
}
}
@@ -1656,7 +1659,7 @@ void FFmpegVideoDecoder::decoderThreadProc()
av_log_set_level(AV_LOG_INFO);
// Capture a frame timestamp to measure pacing delay
frame->pkt_dts = SDL_GetTicks();
frame->pkt_dts = LiGetMicroseconds();
if (!m_FrameInfoQueue.isEmpty()) {
// Data buffers in the DU are not valid here!
@@ -1665,7 +1668,7 @@ void FFmpegVideoDecoder::decoderThreadProc()
// Count time in avcodec_send_packet() and avcodec_receive_frame()
// as time spent decoding. Also count time spent in the decode unit
// queue because that's directly caused by decoder latency.
m_ActiveWndVideoStats.totalDecodeTime += LiGetMillis() - du.enqueueTimeMs;
m_ActiveWndVideoStats.totalDecodeTimeUs += (LiGetMicroseconds() - du.enqueueTimeUs);
// Store the presentation time
frame->pts = du.presentationTimeMs;
@@ -1741,18 +1744,19 @@ int FFmpegVideoDecoder::submitDecodeUnit(PDECODE_UNIT du)
}
if (!m_LastFrameNumber) {
m_ActiveWndVideoStats.measurementStartTimestamp = SDL_GetTicks();
m_ActiveWndVideoStats.measurementStartUs = LiGetMicroseconds();
m_LastFrameNumber = du->frameNumber;
}
else {
// Any frame number greater than m_LastFrameNumber + 1 represents a dropped frame
m_ActiveWndVideoStats.networkDroppedFrames += du->frameNumber - (m_LastFrameNumber + 1);
m_ActiveWndVideoStats.totalFrames += du->frameNumber - (m_LastFrameNumber + 1);
m_ActiveWndVideoStats.receivedVideoBytes += (uint64_t)du->fullLength;
m_LastFrameNumber = du->frameNumber;
}
// Flip stats windows roughly every second
if (SDL_TICKS_PASSED(SDL_GetTicks(), m_ActiveWndVideoStats.measurementStartTimestamp + 1000)) {
if (LiGetMicroseconds() > m_ActiveWndVideoStats.measurementStartUs + 1000000) {
// Update overlay stats if it's enabled
if (Session::get()->getOverlayManager().isOverlayEnabled(Overlay::OverlayDebug)) {
VIDEO_STATS lastTwoWndStats = {};
@@ -1771,7 +1775,7 @@ int FFmpegVideoDecoder::submitDecodeUnit(PDECODE_UNIT du)
// Move this window into the last window slot and clear it for next window
SDL_memcpy(&m_LastWndVideoStats, &m_ActiveWndVideoStats, sizeof(m_ActiveWndVideoStats));
SDL_zero(m_ActiveWndVideoStats);
m_ActiveWndVideoStats.measurementStartTimestamp = SDL_GetTicks();
m_ActiveWndVideoStats.measurementStartUs = LiGetMicroseconds();
}
if (du->frameHostProcessingLatency != 0) {
@@ -1814,7 +1818,7 @@ int FFmpegVideoDecoder::submitDecodeUnit(PDECODE_UNIT du)
m_Pkt->flags = 0;
}
m_ActiveWndVideoStats.totalReassemblyTime += du->enqueueTimeMs - du->receiveTimeMs;
m_ActiveWndVideoStats.totalReassemblyTimeUs += (du->enqueueTimeUs - du->receiveTimeUs);
err = avcodec_send_packet(m_VideoDecoderCtx, m_Pkt);
if (err < 0) {


@@ -12,6 +12,9 @@ OverlayManager::OverlayManager() :
m_Overlays[OverlayType::OverlayDebug].color = {0xD0, 0xD0, 0x00, 0xFF};
m_Overlays[OverlayType::OverlayDebug].fontSize = 20;
m_Overlays[OverlayType::OverlayDebugAudio].color = {0x00, 0xD0, 0xD0, 0xFF};
m_Overlays[OverlayType::OverlayDebugAudio].fontSize = 20;
m_Overlays[OverlayType::OverlayStatusUpdate].color = {0xCC, 0x00, 0x00, 0xFF};
m_Overlays[OverlayType::OverlayStatusUpdate].fontSize = 36;


@@ -9,6 +9,7 @@ namespace Overlay {
enum OverlayType {
OverlayDebug,
OverlayDebugAudio,
OverlayStatusUpdate,
OverlayMax
};
@@ -46,7 +47,7 @@ private:
bool enabled;
int fontSize;
SDL_Color color;
char text[512];
char text[1024];
TTF_Font* font;
SDL_Surface* surface;