Mirror of https://github.com/rock88/moonlight-nx
Synced 2025-02-17 04:58:31 +00:00

[WIP] Refactoring stream configuration

Commit acb5282c26 (parent 183405f575)
27 changed files with 991 additions and 667 deletions
|
@@ -83,15 +83,15 @@
|
|||
3652F083245C60D1001FABF3 /* LoadingOverlay.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 3652F081245C60D1001FABF3 /* LoadingOverlay.cpp */; };
|
||||
3652F086245C6CFC001FABF3 /* moonlight_libretro.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 3652F085245C6CFC001FABF3 /* moonlight_libretro.cpp */; };
|
||||
3652F08A245C8569001FABF3 /* ContentWindow.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 3652F089245C8569001FABF3 /* ContentWindow.cpp */; };
|
||||
3661D2F92469D1940060EE24 /* FFmpegVideoDecoder.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 3661D2F72469D1940060EE24 /* FFmpegVideoDecoder.cpp */; };
|
||||
3661D2FF2469E0C00060EE24 /* GLVideoRenderer.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 3661D2FD2469E0C00060EE24 /* GLVideoRenderer.cpp */; };
|
||||
3661D3022469EFFA0060EE24 /* RetroAudioRenderer.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 3661D3002469EFFA0060EE24 /* RetroAudioRenderer.cpp */; };
|
||||
367CD95A245DE25F00A95738 /* StreamWindow.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 367CD958245DE25F00A95738 /* StreamWindow.cpp */; };
|
||||
36A0C0372461DBA30083289C /* AddHostButton.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 36A0C0352461DBA30083289C /* AddHostButton.cpp */; };
|
||||
36A0C03A2461E4C00083289C /* SettingsWindow.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 36A0C0382461E4C00083289C /* SettingsWindow.cpp */; };
|
||||
36A0C03D2461F03C0083289C /* Settings.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 36A0C03B2461F03C0083289C /* Settings.cpp */; };
|
||||
36A56313245F194000901354 /* gl_render.c in Sources */ = {isa = PBXBuildFile; fileRef = 36A5630C245F194000901354 /* gl_render.c */; };
|
||||
36A56314245F194000901354 /* audio_decoder.c in Sources */ = {isa = PBXBuildFile; fileRef = 36A5630E245F194000901354 /* audio_decoder.c */; };
|
||||
36A56315245F194000901354 /* ffmpeg.c in Sources */ = {isa = PBXBuildFile; fileRef = 36A56310245F194000901354 /* ffmpeg.c */; };
|
||||
36A56316245F194000901354 /* video_decoder.c in Sources */ = {isa = PBXBuildFile; fileRef = 36A56312245F194000901354 /* video_decoder.c */; };
|
||||
36B406982459F5CB005BD903 /* moonlight_glfw.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 36B406962459F460005BD903 /* moonlight_glfw.cpp */; };
|
||||
36D3F8442469B5C400CDEF9B /* MoonlightSession.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 36D3F8422469B5C400CDEF9B /* MoonlightSession.cpp */; };
|
||||
36DBDE9A2450BCD50057C8D3 /* CoreFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 36DBDE992450BCD50057C8D3 /* CoreFoundation.framework */; };
|
||||
36DBDE9C2450BCD90057C8D3 /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 36DBDE9B2450BCD90057C8D3 /* CoreGraphics.framework */; };
|
||||
36DBDE9E2450BCF00057C8D3 /* IOKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 36DBDE9D2450BCF00057C8D3 /* IOKit.framework */; };
|
||||
|
@@ -274,6 +274,14 @@
|
|||
3652F084245C6CFC001FABF3 /* libretro.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = libretro.h; sourceTree = "<group>"; };
|
||||
3652F085245C6CFC001FABF3 /* moonlight_libretro.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = moonlight_libretro.cpp; sourceTree = "<group>"; };
|
||||
3652F089245C8569001FABF3 /* ContentWindow.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = ContentWindow.cpp; sourceTree = "<group>"; };
|
||||
3661D2F72469D1940060EE24 /* FFmpegVideoDecoder.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = FFmpegVideoDecoder.cpp; sourceTree = "<group>"; };
|
||||
3661D2F82469D1940060EE24 /* FFmpegVideoDecoder.hpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.h; path = FFmpegVideoDecoder.hpp; sourceTree = "<group>"; };
|
||||
3661D2FA2469D1E50060EE24 /* IFFmpegVideoDecoder.hpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.h; path = IFFmpegVideoDecoder.hpp; sourceTree = "<group>"; };
|
||||
3661D2FC2469DEEF0060EE24 /* IVideoRenderer.hpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.h; path = IVideoRenderer.hpp; sourceTree = "<group>"; };
|
||||
3661D2FD2469E0C00060EE24 /* GLVideoRenderer.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = GLVideoRenderer.cpp; sourceTree = "<group>"; };
|
||||
3661D2FE2469E0C00060EE24 /* GLVideoRenderer.hpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.h; path = GLVideoRenderer.hpp; sourceTree = "<group>"; };
|
||||
3661D3002469EFFA0060EE24 /* RetroAudioRenderer.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = RetroAudioRenderer.cpp; sourceTree = "<group>"; };
|
||||
3661D3012469EFFA0060EE24 /* RetroAudioRenderer.hpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.h; path = RetroAudioRenderer.hpp; sourceTree = "<group>"; };
|
||||
367CD958245DE25F00A95738 /* StreamWindow.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = StreamWindow.cpp; sourceTree = "<group>"; };
|
||||
367CD959245DE25F00A95738 /* StreamWindow.hpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.h; path = StreamWindow.hpp; sourceTree = "<group>"; };
|
||||
36A0C0352461DBA30083289C /* AddHostButton.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = AddHostButton.cpp; sourceTree = "<group>"; };
|
||||
|
@@ -283,15 +291,11 @@
|
|||
36A0C03B2461F03C0083289C /* Settings.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = Settings.cpp; sourceTree = "<group>"; };
|
||||
36A0C03C2461F03C0083289C /* Settings.hpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.h; path = Settings.hpp; sourceTree = "<group>"; };
|
||||
36A0C03E2461FFF10083289C /* build_opus_lakka_switch.sh */ = {isa = PBXFileReference; lastKnownFileType = text.script.sh; path = build_opus_lakka_switch.sh; sourceTree = "<group>"; };
|
||||
36A5630B245F194000901354 /* gl_render.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = gl_render.h; sourceTree = "<group>"; };
|
||||
36A5630C245F194000901354 /* gl_render.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = gl_render.c; sourceTree = "<group>"; };
|
||||
36A5630D245F194000901354 /* audio_decoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = audio_decoder.h; sourceTree = "<group>"; };
|
||||
36A5630E245F194000901354 /* audio_decoder.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = audio_decoder.c; sourceTree = "<group>"; };
|
||||
36A5630F245F194000901354 /* video_decoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = video_decoder.h; sourceTree = "<group>"; };
|
||||
36A56310245F194000901354 /* ffmpeg.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = ffmpeg.c; sourceTree = "<group>"; };
|
||||
36A56311245F194000901354 /* ffmpeg.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ffmpeg.h; sourceTree = "<group>"; };
|
||||
36A56312245F194000901354 /* video_decoder.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = video_decoder.c; sourceTree = "<group>"; };
|
||||
36B406962459F460005BD903 /* moonlight_glfw.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = moonlight_glfw.cpp; sourceTree = "<group>"; };
|
||||
36D3F8422469B5C400CDEF9B /* MoonlightSession.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = MoonlightSession.cpp; sourceTree = "<group>"; };
|
||||
36D3F8432469B5C400CDEF9B /* MoonlightSession.hpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.h; path = MoonlightSession.hpp; sourceTree = "<group>"; };
|
||||
36D3F8452469C6BC00CDEF9B /* Log.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Log.h; sourceTree = "<group>"; };
|
||||
36D3F8492469CC2600CDEF9B /* IAudioRenderer.hpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.h; path = IAudioRenderer.hpp; sourceTree = "<group>"; };
|
||||
36DBDE8E2450BB7E0057C8D3 /* moonlight */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.executable"; includeInIndex = 0; path = moonlight; sourceTree = BUILT_PRODUCTS_DIR; };
|
||||
36DBDE992450BCD50057C8D3 /* CoreFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreFoundation.framework; path = System/Library/Frameworks/CoreFoundation.framework; sourceTree = SDKROOT; };
|
||||
36DBDE9B2450BCD90057C8D3 /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; };
|
||||
|
@@ -608,19 +612,14 @@
|
|||
path = src;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
367CD95B245DE6A900A95738 /* decoders */ = {
|
||||
3661D2FB2469DEDB0060EE24 /* video */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
36A56310245F194000901354 /* ffmpeg.c */,
|
||||
36A56311245F194000901354 /* ffmpeg.h */,
|
||||
36A5630C245F194000901354 /* gl_render.c */,
|
||||
36A5630B245F194000901354 /* gl_render.h */,
|
||||
36A56312245F194000901354 /* video_decoder.c */,
|
||||
36A5630F245F194000901354 /* video_decoder.h */,
|
||||
36A5630E245F194000901354 /* audio_decoder.c */,
|
||||
36A5630D245F194000901354 /* audio_decoder.h */,
|
||||
3661D2FD2469E0C00060EE24 /* GLVideoRenderer.cpp */,
|
||||
3661D2FE2469E0C00060EE24 /* GLVideoRenderer.hpp */,
|
||||
3661D2FC2469DEEF0060EE24 /* IVideoRenderer.hpp */,
|
||||
);
|
||||
path = decoders;
|
||||
path = video;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
36B406932459F41E005BD903 /* src */ = {
|
||||
|
@@ -629,13 +628,14 @@
|
|||
36DFE0D5245A1FEC00FC51CE /* glsym */,
|
||||
36DFE0CA2459FA3F00FC51CE /* nanogui_resources */,
|
||||
36DFDCF12459F79000FC51CE /* ui */,
|
||||
367CD95B245DE6A900A95738 /* decoders */,
|
||||
36D3F8462469C8DD00CDEF9B /* streaming */,
|
||||
3652F003245C28C6001FABF3 /* GameStreamClient.cpp */,
|
||||
3652F004245C28C6001FABF3 /* GameStreamClient.hpp */,
|
||||
36A0C03B2461F03C0083289C /* Settings.cpp */,
|
||||
36A0C03C2461F03C0083289C /* Settings.hpp */,
|
||||
3603E93A246316400051287D /* InputController.cpp */,
|
||||
3603E93B246316400051287D /* InputController.hpp */,
|
||||
36D3F8452469C6BC00CDEF9B /* Log.h */,
|
||||
3652F084245C6CFC001FABF3 /* libretro.h */,
|
||||
3652F085245C6CFC001FABF3 /* moonlight_libretro.cpp */,
|
||||
36B406962459F460005BD903 /* moonlight_glfw.cpp */,
|
||||
|
@@ -643,6 +643,38 @@
|
|||
path = src;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
36D3F8462469C8DD00CDEF9B /* streaming */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
36D3F8482469CC0400CDEF9B /* audio */,
|
||||
3661D2FB2469DEDB0060EE24 /* video */,
|
||||
36D3F8472469CB8800CDEF9B /* ffmpeg */,
|
||||
36D3F8422469B5C400CDEF9B /* MoonlightSession.cpp */,
|
||||
36D3F8432469B5C400CDEF9B /* MoonlightSession.hpp */,
|
||||
);
|
||||
path = streaming;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
36D3F8472469CB8800CDEF9B /* ffmpeg */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
3661D2F72469D1940060EE24 /* FFmpegVideoDecoder.cpp */,
|
||||
3661D2F82469D1940060EE24 /* FFmpegVideoDecoder.hpp */,
|
||||
3661D2FA2469D1E50060EE24 /* IFFmpegVideoDecoder.hpp */,
|
||||
);
|
||||
path = ffmpeg;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
36D3F8482469CC0400CDEF9B /* audio */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
3661D3002469EFFA0060EE24 /* RetroAudioRenderer.cpp */,
|
||||
3661D3012469EFFA0060EE24 /* RetroAudioRenderer.hpp */,
|
||||
36D3F8492469CC2600CDEF9B /* IAudioRenderer.hpp */,
|
||||
);
|
||||
path = audio;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
36DBDE852450BB7E0057C8D3 = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
|
@@ -801,7 +833,7 @@
|
|||
isa = PBXSourcesBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
36A56315245F194000901354 /* ffmpeg.c in Sources */,
|
||||
3661D3022469EFFA0060EE24 /* RetroAudioRenderer.cpp in Sources */,
|
||||
36DFE218245A278900FC51CE /* rglgen.c in Sources */,
|
||||
3652F075245C292B001FABF3 /* VideoStream.c in Sources */,
|
||||
3652EFE0245B3B00001FABF3 /* imageview.cpp in Sources */,
|
||||
|
@@ -818,7 +850,6 @@
|
|||
3652EFE6245B3B00001FABF3 /* traits.cpp in Sources */,
|
||||
3652EFDE245B3B00001FABF3 /* tabwidget.cpp in Sources */,
|
||||
3652F07A245C292B001FABF3 /* SdpGenerator.c in Sources */,
|
||||
36A56316245F194000901354 /* video_decoder.c in Sources */,
|
||||
3652EFEB245B3B00001FABF3 /* imagepanel.cpp in Sources */,
|
||||
36A0C0372461DBA30083289C /* AddHostButton.cpp in Sources */,
|
||||
3652F06E245C292B001FABF3 /* rs.c in Sources */,
|
||||
|
@@ -836,9 +867,9 @@
|
|||
3652F083245C60D1001FABF3 /* LoadingOverlay.cpp in Sources */,
|
||||
3652EFF6245B3CF2001FABF3 /* nanovg.c in Sources */,
|
||||
3652F067245C292B001FABF3 /* packet.c in Sources */,
|
||||
3661D2F92469D1940060EE24 /* FFmpegVideoDecoder.cpp in Sources */,
|
||||
3652F072245C292B001FABF3 /* RtspConnection.c in Sources */,
|
||||
3652F005245C28C6001FABF3 /* GameStreamClient.cpp in Sources */,
|
||||
36A56314245F194000901354 /* audio_decoder.c in Sources */,
|
||||
3652EFE8245B3B00001FABF3 /* opengl.cpp in Sources */,
|
||||
3652EFD7245B3B00001FABF3 /* screen.cpp in Sources */,
|
||||
3652EFE1245B3B00001FABF3 /* progressbar.cpp in Sources */,
|
||||
|
@@ -850,6 +881,7 @@
|
|||
36A0C03A2461E4C00083289C /* SettingsWindow.cpp in Sources */,
|
||||
3652EFD4245B3B00001FABF3 /* shader_gl.cpp in Sources */,
|
||||
3602C3BD245DBA9100368900 /* AppButton.cpp in Sources */,
|
||||
36D3F8442469B5C400CDEF9B /* MoonlightSession.cpp in Sources */,
|
||||
3652EFE7245B3B00001FABF3 /* label.cpp in Sources */,
|
||||
3652EFDA245B3B00001FABF3 /* layout.cpp in Sources */,
|
||||
3652F080245C292B001FABF3 /* ByteBuffer.c in Sources */,
|
||||
|
@@ -874,10 +906,10 @@
|
|||
3652F076245C292B001FABF3 /* Connection.c in Sources */,
|
||||
3652F011245C2919001FABF3 /* client.c in Sources */,
|
||||
3652EFE4245B3B00001FABF3 /* combobox.cpp in Sources */,
|
||||
36A56313245F194000901354 /* gl_render.c in Sources */,
|
||||
3603E93C246316400051287D /* InputController.cpp in Sources */,
|
||||
3652EFF2245B3B00001FABF3 /* textbox.cpp in Sources */,
|
||||
3652F074245C292B001FABF3 /* Platform.c in Sources */,
|
||||
3661D2FF2469E0C00060EE24 /* GLVideoRenderer.cpp in Sources */,
|
||||
3652EFCD245B3B00001FABF3 /* widget.cpp in Sources */,
|
||||
3652F073245C292B001FABF3 /* PlatformSockets.c in Sources */,
|
||||
3652F08A245C8569001FABF3 /* ContentWindow.cpp in Sources */,
|
||||
|
|
|
@@ -13,15 +13,36 @@
|
|||
#include <unistd.h>
|
||||
|
||||
static std::mutex m_async_mutex;
|
||||
static std::vector<std::function<void()>> m_tasks;
|
||||
|
||||
void perform_async(std::function<void()> task) {
|
||||
auto thread = std::thread([task](){
|
||||
std::lock_guard<std::mutex> guard(m_async_mutex);
|
||||
task();
|
||||
static void task_loop() {
|
||||
auto thread = std::thread([](){
|
||||
while (1) {
|
||||
std::vector<std::function<void()>> m_tasks_copy; {
|
||||
std::lock_guard<std::mutex> guard(m_async_mutex);
|
||||
m_tasks_copy = m_tasks;
|
||||
m_tasks.clear();
|
||||
}
|
||||
|
||||
for (auto task: m_tasks_copy) {
|
||||
task();
|
||||
}
|
||||
|
||||
usleep(500'000);
|
||||
}
|
||||
});
|
||||
thread.detach();
|
||||
}
|
||||
|
||||
void perform_async(std::function<void()> task) {
|
||||
std::lock_guard<std::mutex> guard(m_async_mutex);
|
||||
m_tasks.push_back(task);
|
||||
}
|
||||
|
||||
GameStreamClient::GameStreamClient() {
|
||||
task_loop();
|
||||
}
|
||||
|
||||
void GameStreamClient::connect(const std::string &address, ServerCallback<SERVER_DATA> callback) {
|
||||
m_server_data[address] = SERVER_DATA();
|
||||
|
||||
|
|
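The refactored helper above no longer spawns a thread per call: perform_async() now only appends the task to m_tasks under m_async_mutex, and the single detached worker started by task_loop() drains the queue roughly every 500 ms. A minimal standalone sketch of the same producer/worker pattern, with invented names and no dependency on the project's headers:

    #include <chrono>
    #include <cstdio>
    #include <functional>
    #include <mutex>
    #include <thread>
    #include <vector>

    static std::mutex g_queue_mutex;
    static std::vector<std::function<void()>> g_queue;

    // Start one detached worker that periodically drains the queue (mirrors task_loop()).
    static void start_worker() {
        std::thread([] {
            for (;;) {
                std::vector<std::function<void()>> batch;
                {
                    // Copy-and-clear under the lock so tasks run without holding it.
                    std::lock_guard<std::mutex> guard(g_queue_mutex);
                    batch.swap(g_queue);
                }
                for (auto& task : batch) task();
                std::this_thread::sleep_for(std::chrono::milliseconds(500));
            }
        }).detach();
    }

    // Producer side (mirrors perform_async()): just enqueue, never block on the work itself.
    static void enqueue(std::function<void()> task) {
        std::lock_guard<std::mutex> guard(g_queue_mutex);
        g_queue.push_back(std::move(task));
    }

    int main() {
        start_worker();
        enqueue([] { std::puts("ran on the worker thread"); });
        std::this_thread::sleep_for(std::chrono::seconds(1)); // let the worker tick once
    }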
|
@@ -49,10 +49,7 @@ private:
|
|||
bool _isSuccess = false;
|
||||
};
|
||||
|
||||
#define LOG(fmt, ...) fprintf(stderr, fmt, __VA_ARGS__);
|
||||
|
||||
template<class T> using ServerCallback = const std::function<void(Result<T>)>;
|
||||
//#define ServerCallback(T) std::function<void(Result<T>)>
|
||||
|
||||
class GameStreamClient {
|
||||
public:
|
||||
|
@@ -73,7 +70,7 @@ public:
|
|||
void quit(const std::string &address, ServerCallback<bool> callback);
|
||||
|
||||
private:
|
||||
GameStreamClient() {};
|
||||
GameStreamClient();
|
||||
|
||||
std::map<std::string, SERVER_DATA> m_server_data;
|
||||
std::map<std::string, PAPP_LIST> m_app_list;
|
||||
|
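For reference, the ServerCallback alias kept above is used like this on the caller side. This is a hedged sketch, not code from the commit; it assumes only the Result<T> accessors already used elsewhere in the diff, isSuccess() and value(), and the address string is a made-up example value:

    #include "GameStreamClient.hpp"

    void fetch_server_info() {
        GameStreamClient::client()->connect("192.168.1.10", [](Result<SERVER_DATA> result) {
            if (result.isSuccess()) {
                SERVER_DATA server = result.value();
                // hand the paired server's data to the UI...
            } else {
                // connection failed; surface an error to the user
            }
        });
    }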
|
9
src/Log.h
Normal file
|
@@ -0,0 +1,9 @@
|
|||
#include <stdint.h>
|
||||
|
||||
#define LOG(x) fprintf(stderr, x);
|
||||
|
||||
#define LOG_FMT(fmt, ...) fprintf(stderr, fmt, __VA_ARGS__);
|
||||
|
||||
#define DEBUG_LOG(x) LOG_FMT("[%s|%s|%i] %s\n", __FILE__, __FUNCTION__, __LINE__, x);
|
||||
|
||||
#define DEBUG_EMPTY_LOG DEBUG_LOG("")
|
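A short usage sketch for the new logging macros (hypothetical call sites). Note that Log.h itself only includes <stdint.h>, so the including translation unit is expected to provide <stdio.h> for fprintf:

    #include <cstdio>
    #include "Log.h"

    int main() {
        LOG("plain message\n");               // fprintf(stderr, ...) with no formatting
        LOG_FMT("size: %dx%d\n", 1280, 720);  // printf-style formatting
        DEBUG_LOG("decoder ready")            // prefixes [file|function|line]; the macro already ends with ';'
        DEBUG_EMPTY_LOG                       // just the [file|function|line] marker
    }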
|
@@ -1,42 +0,0 @@
|
|||
#include "audio_decoder.h"
|
||||
#include "libretro.h"
|
||||
#include <opus/opus_multistream.h>
|
||||
|
||||
#define MAX_CHANNEL_COUNT 6
|
||||
#define FRAME_SIZE 240
|
||||
#define FRAME_BUFFER 12
|
||||
|
||||
static OpusMSDecoder* decoder;
|
||||
static short pcmBuffer[FRAME_SIZE * MAX_CHANNEL_COUNT];
|
||||
static int channelCount;
|
||||
|
||||
static int audio_decoder_init(int audioConfiguration, POPUS_MULTISTREAM_CONFIGURATION opusConfig, void* context, int arFlags) {
|
||||
int error;
|
||||
decoder = opus_multistream_decoder_create(opusConfig->sampleRate, opusConfig->channelCount, opusConfig->streams, opusConfig->coupledStreams, opusConfig->mapping, &error);
|
||||
channelCount = opusConfig->channelCount;
|
||||
return 0;
|
||||
}
|
||||
|
||||
extern retro_audio_sample_batch_t audio_batch_cb;
|
||||
|
||||
static void audio_decoder_decode_and_play_sample(char* data, int length) {
|
||||
if (audio_batch_cb) {
|
||||
int decodeLen = opus_multistream_decode(decoder, data, length, pcmBuffer, FRAME_SIZE, 0);
|
||||
if (decodeLen > 0) {
|
||||
audio_batch_cb(pcmBuffer, decodeLen);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static void audio_decoder_cleanup() {
|
||||
if (decoder != NULL) {
|
||||
opus_multistream_decoder_destroy(decoder);
|
||||
}
|
||||
}
|
||||
|
||||
AUDIO_RENDERER_CALLBACKS audio_decoder_callbacks = {
|
||||
.init = audio_decoder_init,
|
||||
.cleanup = audio_decoder_cleanup,
|
||||
.decodeAndPlaySample = audio_decoder_decode_and_play_sample,
|
||||
.capabilities = CAPABILITY_DIRECT_SUBMIT,
|
||||
};
|
|
@@ -1,3 +0,0 @@
|
|||
#include "Limelight.h"
|
||||
|
||||
extern AUDIO_RENDERER_CALLBACKS audio_decoder_callbacks;
|
|
@@ -1,175 +0,0 @@
|
|||
/*
|
||||
* This file is part of Moonlight Embedded.
|
||||
*
|
||||
* Based on Moonlight Pc implementation
|
||||
*
|
||||
* Moonlight is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation; either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Moonlight is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with Moonlight; if not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#include "ffmpeg.h"
|
||||
|
||||
#ifdef HAVE_VAAPI
|
||||
#include "ffmpeg_vaapi.h"
|
||||
#endif
|
||||
|
||||
#include <Limelight.h>
|
||||
#include <libavcodec/avcodec.h>
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <pthread.h>
|
||||
#include <stdio.h>
|
||||
#include <stdbool.h>
|
||||
|
||||
// General decoder and renderer state
|
||||
static AVPacket pkt;
|
||||
static AVCodec* decoder;
|
||||
static AVCodecContext* decoder_ctx;
|
||||
static AVFrame** dec_frames;
|
||||
|
||||
static int dec_frames_cnt;
|
||||
static int current_frame, next_frame;
|
||||
|
||||
enum decoders ffmpeg_decoder;
|
||||
|
||||
#define BYTES_PER_PIXEL 4
|
||||
|
||||
// This function must be called before
|
||||
// any other decoding functions
|
||||
int ffmpeg_init(int videoFormat, int width, int height, int perf_lvl, int buffer_count, int thread_count) {
|
||||
// Initialize the avcodec library and register codecs
|
||||
av_log_set_level(AV_LOG_QUIET);
|
||||
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(58,10,100)
|
||||
avcodec_register_all();
|
||||
#endif
|
||||
|
||||
av_init_packet(&pkt);
|
||||
|
||||
ffmpeg_decoder = perf_lvl & VAAPI_ACCELERATION ? VAAPI : SOFTWARE;
|
||||
switch (videoFormat) {
|
||||
case VIDEO_FORMAT_H264:
|
||||
decoder = avcodec_find_decoder_by_name("h264");
|
||||
break;
|
||||
case VIDEO_FORMAT_H265:
|
||||
decoder = avcodec_find_decoder_by_name("hevc");
|
||||
break;
|
||||
}
|
||||
|
||||
if (decoder == NULL) {
|
||||
printf("Couldn't find decoder\n");
|
||||
return -1;
|
||||
}
|
||||
|
||||
decoder_ctx = avcodec_alloc_context3(decoder);
|
||||
if (decoder_ctx == NULL) {
|
||||
printf("Couldn't allocate context");
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (perf_lvl & DISABLE_LOOP_FILTER)
|
||||
// Skip the loop filter for performance reasons
|
||||
decoder_ctx->skip_loop_filter = AVDISCARD_ALL;
|
||||
|
||||
if (perf_lvl & LOW_LATENCY_DECODE)
|
||||
// Use low delay single threaded encoding
|
||||
decoder_ctx->flags |= AV_CODEC_FLAG_LOW_DELAY;
|
||||
|
||||
if (perf_lvl & SLICE_THREADING)
|
||||
decoder_ctx->thread_type = FF_THREAD_SLICE;
|
||||
else
|
||||
decoder_ctx->thread_type = FF_THREAD_FRAME;
|
||||
|
||||
decoder_ctx->thread_count = thread_count;
|
||||
|
||||
decoder_ctx->width = width;
|
||||
decoder_ctx->height = height;
|
||||
decoder_ctx->pix_fmt = AV_PIX_FMT_YUV420P;
|
||||
|
||||
int err = avcodec_open2(decoder_ctx, decoder, NULL);
|
||||
if (err < 0) {
|
||||
printf("Couldn't open codec");
|
||||
return err;
|
||||
}
|
||||
|
||||
dec_frames_cnt = buffer_count;
|
||||
dec_frames = malloc(buffer_count * sizeof(AVFrame*));
|
||||
if (dec_frames == NULL) {
|
||||
fprintf(stderr, "Couldn't allocate frames");
|
||||
return -1;
|
||||
}
|
||||
|
||||
for (int i = 0; i < buffer_count; i++) {
|
||||
dec_frames[i] = av_frame_alloc();
|
||||
if (dec_frames[i] == NULL) {
|
||||
fprintf(stderr, "Couldn't allocate frame");
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
#ifdef HAVE_VAAPI
|
||||
if (ffmpeg_decoder == VAAPI)
|
||||
vaapi_init(decoder_ctx);
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
// This function must be called after
|
||||
// decoding is finished
|
||||
void ffmpeg_destroy(void) {
|
||||
if (decoder_ctx) {
|
||||
avcodec_close(decoder_ctx);
|
||||
av_free(decoder_ctx);
|
||||
decoder_ctx = NULL;
|
||||
}
|
||||
if (dec_frames) {
|
||||
for (int i = 0; i < dec_frames_cnt; i++) {
|
||||
if (dec_frames[i])
|
||||
av_frame_free(&dec_frames[i]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
AVFrame* ffmpeg_get_frame(bool native_frame) {
|
||||
int err = avcodec_receive_frame(decoder_ctx, dec_frames[next_frame]);
|
||||
if (err == 0) {
|
||||
current_frame = next_frame;
|
||||
next_frame = (current_frame+1) % dec_frames_cnt;
|
||||
|
||||
if (ffmpeg_decoder == SOFTWARE || native_frame)
|
||||
return dec_frames[current_frame];
|
||||
} else if (err != AVERROR(EAGAIN)) {
|
||||
char errorstring[512];
|
||||
av_strerror(err, errorstring, sizeof(errorstring));
|
||||
fprintf(stderr, "Receive failed - %d/%s\n", err, errorstring);
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// packets must be decoded in order
|
||||
// indata must be inlen + AV_INPUT_BUFFER_PADDING_SIZE in length
|
||||
int ffmpeg_decode(unsigned char* indata, int inlen) {
|
||||
int err;
|
||||
|
||||
pkt.data = indata;
|
||||
pkt.size = inlen;
|
||||
|
||||
err = avcodec_send_packet(decoder_ctx, &pkt);
|
||||
if (err < 0) {
|
||||
char errorstring[512];
|
||||
av_strerror(err, errorstring, sizeof(errorstring));
|
||||
fprintf(stderr, "Decode failed - %s\n", errorstring);
|
||||
}
|
||||
|
||||
return err < 0 ? err : 0;
|
||||
}
|
|
@@ -1,48 +0,0 @@
|
|||
/*
|
||||
* This file is part of Moonlight Embedded.
|
||||
*
|
||||
* Based on Moonlight Pc implementation
|
||||
*
|
||||
* Moonlight is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation; either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Moonlight is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with Moonlight; if not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#include <stdbool.h>
|
||||
|
||||
#include <libavcodec/avcodec.h>
|
||||
|
||||
// Disables the deblocking filter at the cost of image quality
|
||||
#define DISABLE_LOOP_FILTER 0x1
|
||||
// Uses the low latency decode flag (disables multithreading)
|
||||
#define LOW_LATENCY_DECODE 0x2
|
||||
// Threads process each slice, rather than each frame
|
||||
#define SLICE_THREADING 0x4
|
||||
// Uses nonstandard speedup tricks
|
||||
#define FAST_DECODE 0x8
|
||||
// Uses bilinear filtering instead of bicubic
|
||||
#define BILINEAR_FILTERING 0x10
|
||||
// Uses a faster bilinear filtering with lower image quality
|
||||
#define FAST_BILINEAR_FILTERING 0x20
|
||||
// Uses hardware acceleration
|
||||
#define VDPAU_ACCELERATION 0x40
|
||||
#define VAAPI_ACCELERATION 0x80
|
||||
|
||||
enum decoders {SOFTWARE, VDPAU, VAAPI};
|
||||
extern enum decoders ffmpeg_decoder;
|
||||
|
||||
int ffmpeg_init(int videoFormat, int width, int height, int perf_lvl, int buffer_count, int thread_count);
|
||||
void ffmpeg_destroy(void);
|
||||
|
||||
int ffmpeg_draw_frame(AVFrame *pict);
|
||||
AVFrame* ffmpeg_get_frame(bool native_frame);
|
||||
int ffmpeg_decode(unsigned char* indata, int inlen);
|
|
@@ -1,178 +0,0 @@
|
|||
#include "gl_render.h"
|
||||
#include "libretro.h"
|
||||
#include "glsym.h"
|
||||
|
||||
static const char *vertex_shader_string = "\
|
||||
#version 140\n\
|
||||
in vec2 position;\n\
|
||||
out mediump vec2 tex_position;\n\
|
||||
\
|
||||
void main() {\n\
|
||||
gl_Position = vec4(position, 1, 1);\n\
|
||||
tex_position = vec2((position.x + 1.0) / 2.0, (1.0 - position.y) / 2.0);\n\
|
||||
}";
|
||||
|
||||
static const char *fragment_shader_string = "\
|
||||
#version 140\n\
|
||||
uniform lowp sampler2D ymap;\n\
|
||||
uniform lowp sampler2D umap;\n\
|
||||
uniform lowp sampler2D vmap;\n\
|
||||
uniform mat3 yuvmat;\n\
|
||||
uniform vec3 offset;\n\
|
||||
in mediump vec2 tex_position;\n\
|
||||
out vec4 FragColor;\n\
|
||||
\
|
||||
void main() {\n\
|
||||
vec3 YCbCr = vec3(\n\
|
||||
texture(ymap, tex_position).r,\n\
|
||||
texture(umap, tex_position).r - 0.0,\n\
|
||||
texture(vmap, tex_position).r - 0.0\n\
|
||||
);\n\
|
||||
YCbCr -= offset;\n\
|
||||
FragColor = vec4(clamp(yuvmat * YCbCr, 0.0, 1.0), 1.0);\n\
|
||||
}";
|
||||
|
||||
static const float vertices[] = {
|
||||
-1.f, -1.f,
|
||||
1.f, -1.f,
|
||||
-1.f, 1.f,
|
||||
1.f, 1.f
|
||||
};
|
||||
|
||||
static const char* texture_mappings[] = { "ymap", "umap", "vmap" };
|
||||
static GLuint texture_id[3], texture_uniform[3];
|
||||
static GLuint shader_program;
|
||||
static GLuint vbo, vao;
|
||||
static int gl_render_width = 0, gl_render_height = 0;
|
||||
static int yuvmat_location, offset_location;
|
||||
|
||||
void gl_render_init() {
|
||||
shader_program = glCreateProgram();
|
||||
GLuint vert = glCreateShader(GL_VERTEX_SHADER);
|
||||
GLuint frag = glCreateShader(GL_FRAGMENT_SHADER);
|
||||
|
||||
glShaderSource(vert, 1, &vertex_shader_string, 0);
|
||||
glCompileShader(vert);
|
||||
|
||||
glShaderSource(frag, 1, &fragment_shader_string, 0);
|
||||
glCompileShader(frag);
|
||||
|
||||
glAttachShader(shader_program, vert);
|
||||
glAttachShader(shader_program, frag);
|
||||
glLinkProgram(shader_program);
|
||||
glDeleteShader(vert);
|
||||
glDeleteShader(frag);
|
||||
|
||||
for (int i = 0; i < 3; i++) {
|
||||
texture_uniform[i] = glGetUniformLocation(shader_program, texture_mappings[i]);
|
||||
}
|
||||
|
||||
glGenBuffers(1, &vbo);
|
||||
glBindBuffer(GL_ARRAY_BUFFER, vbo);
|
||||
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
|
||||
|
||||
glGenVertexArrays(1, &vao);
|
||||
glBindVertexArray(vao);
|
||||
|
||||
glUseProgram(shader_program);
|
||||
|
||||
int position_location = glGetAttribLocation(shader_program, "position");
|
||||
glEnableVertexAttribArray(position_location);
|
||||
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, NULL);
|
||||
|
||||
yuvmat_location = glGetUniformLocation(shader_program, "yuvmat");
|
||||
offset_location = glGetUniformLocation(shader_program, "offset");
|
||||
}
|
||||
|
||||
void gl_render_setup(int width, int height) {
|
||||
if (gl_render_width != width || gl_render_height != height) {
|
||||
glDeleteTextures(3, texture_id);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
|
||||
gl_render_width = width;
|
||||
gl_render_height = height;
|
||||
|
||||
glGenTextures(3, texture_id);
|
||||
|
||||
for (int i = 0; i < 3; i++) {
|
||||
glBindTexture(GL_TEXTURE_2D, texture_id[i]);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, i > 0 ? width / 2 : width, i > 0 ? height / 2 : height, 0, GL_RED, GL_UNSIGNED_BYTE, NULL);
|
||||
}
|
||||
}
|
||||
|
||||
const float* gl_color_offset(bool color_full) {
|
||||
static const float limitedOffsets[] = { 16.0f / 255.0f, 128.0f / 255.0f, 128.0f / 255.0f };
|
||||
static const float fullOffsets[] = { 0.0f, 128.0f / 255.0f, 128.0f / 255.0f };
|
||||
return color_full ? fullOffsets : limitedOffsets;
|
||||
}
|
||||
|
||||
const float* gl_color_matrix(enum AVColorSpace color_space, bool color_full) {
|
||||
static const float bt601Lim[] = {
|
||||
1.1644f, 1.1644f, 1.1644f,
|
||||
0.0f, -0.3917f, 2.0172f,
|
||||
1.5960f, -0.8129f, 0.0f
|
||||
};
|
||||
static const float bt601Full[] = {
|
||||
1.0f, 1.0f, 1.0f,
|
||||
0.0f, -0.3441f, 1.7720f,
|
||||
1.4020f, -0.7141f, 0.0f
|
||||
};
|
||||
static const float bt709Lim[] = {
|
||||
1.1644f, 1.1644f, 1.1644f,
|
||||
0.0f, -0.2132f, 2.1124f,
|
||||
1.7927f, -0.5329f, 0.0f
|
||||
};
|
||||
static const float bt709Full[] = {
|
||||
1.0f, 1.0f, 1.0f,
|
||||
0.0f, -0.1873f, 1.8556f,
|
||||
1.5748f, -0.4681f, 0.0f
|
||||
};
|
||||
static const float bt2020Lim[] = {
|
||||
1.1644f, 1.1644f, 1.1644f,
|
||||
0.0f, -0.1874f, 2.1418f,
|
||||
1.6781f, -0.6505f, 0.0f
|
||||
};
|
||||
static const float bt2020Full[] = {
|
||||
1.0f, 1.0f, 1.0f,
|
||||
0.0f, -0.1646f, 1.8814f,
|
||||
1.4746f, -0.5714f, 0.0f
|
||||
};
|
||||
|
||||
switch (color_space) {
|
||||
case AVCOL_SPC_SMPTE170M:
|
||||
case AVCOL_SPC_BT470BG:
|
||||
return color_full ? bt601Full : bt601Lim;
|
||||
case AVCOL_SPC_BT709:
|
||||
return color_full ? bt709Full : bt709Lim;
|
||||
case AVCOL_SPC_BT2020_NCL:
|
||||
case AVCOL_SPC_BT2020_CL:
|
||||
return color_full ? bt2020Full : bt2020Lim;
|
||||
default:
|
||||
return bt601Lim;
|
||||
};
|
||||
}
|
||||
|
||||
void gl_render_draw(uint8_t* image[3], enum AVColorSpace color_space, enum AVColorRange color_range) {
|
||||
glClearColor(0, 0, 0, 1);
|
||||
glClear(GL_COLOR_BUFFER_BIT);
|
||||
|
||||
glUseProgram(shader_program);
|
||||
|
||||
glUniform3fv(offset_location, 1, gl_color_offset(color_range == AVCOL_RANGE_JPEG));
|
||||
glUniformMatrix3fv(yuvmat_location, 1, GL_FALSE, gl_color_matrix(color_space, color_range == AVCOL_RANGE_JPEG));
|
||||
|
||||
for (int i = 0; i < 3; i++) {
|
||||
glActiveTexture(GL_TEXTURE0 + i);
|
||||
glBindTexture(GL_TEXTURE_2D, texture_id[i]);
|
||||
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, i > 0 ? gl_render_width / 2 : gl_render_width, i > 0 ? gl_render_height / 2 : gl_render_height, GL_RED, GL_UNSIGNED_BYTE, image[i]);
|
||||
glUniform1i(texture_uniform[i], i);
|
||||
glActiveTexture(GL_TEXTURE0);
|
||||
}
|
||||
|
||||
glBindVertexArray(vao);
|
||||
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
|
||||
}
|
|
@@ -1,13 +0,0 @@
|
|||
#include <stdio.h>
|
||||
#include <stdint.h>
|
||||
#include <libavcodec/avcodec.h>
|
||||
|
||||
#ifdef __cplusplus
|
||||
#define EXTERN extern "C"
|
||||
#else
|
||||
#define EXTERN
|
||||
#endif
|
||||
|
||||
EXTERN void gl_render_init();
|
||||
EXTERN void gl_render_setup(int width, int height);
|
||||
EXTERN void gl_render_draw(uint8_t* image[3], enum AVColorSpace color_space, enum AVColorRange color_range);
|
|
@@ -1,65 +0,0 @@
|
|||
#include "video_decoder.h"
|
||||
#include "ffmpeg.h"
|
||||
|
||||
#define DECODER_BUFFER_SIZE 92 * 1024
|
||||
|
||||
static char* ffmpeg_buffer;
|
||||
AVFrame* frame = NULL;
|
||||
volatile int frame_is_ready = 0;
|
||||
pthread_mutex_t mutex;
|
||||
|
||||
static int video_decoder_setup(int videoFormat, int width, int height, int redrawRate, void* context, int drFlags) {
|
||||
pthread_mutex_init(&mutex, NULL);
|
||||
|
||||
printf("decoder_setup: %ix%i\n", width, height);
|
||||
|
||||
int avc_flags = SLICE_THREADING;
|
||||
|
||||
if (ffmpeg_init(videoFormat, width, height, avc_flags, 2, 2) < 0) {
|
||||
fprintf(stderr, "Couldn't initialize video decoding\n");
|
||||
return -1;
|
||||
}
|
||||
|
||||
ffmpeg_buffer = malloc(DECODER_BUFFER_SIZE + AV_INPUT_BUFFER_PADDING_SIZE);
|
||||
if (ffmpeg_buffer == NULL) {
|
||||
fprintf(stderr, "Not enough memory\n");
|
||||
ffmpeg_destroy();
|
||||
return -1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void video_decoder_cleanup() {
|
||||
//TODO: free ffmpeg_buffer?
|
||||
ffmpeg_destroy();
|
||||
frame = NULL;
|
||||
}
|
||||
|
||||
static int video_decoder_submit_decode_unit(PDECODE_UNIT decodeUnit) {
|
||||
if (decodeUnit->fullLength < DECODER_BUFFER_SIZE) {
|
||||
PLENTRY entry = decodeUnit->bufferList;
|
||||
int length = 0;
|
||||
while (entry != NULL) {
|
||||
memcpy(ffmpeg_buffer+length, entry->data, entry->length);
|
||||
length += entry->length;
|
||||
entry = entry->next;
|
||||
}
|
||||
ffmpeg_decode(ffmpeg_buffer, length);
|
||||
|
||||
if (pthread_mutex_lock(&mutex) == 0) {
|
||||
frame = ffmpeg_get_frame(false);
|
||||
|
||||
// Push event!!
|
||||
pthread_mutex_unlock(&mutex);
|
||||
}
|
||||
}
|
||||
return DR_OK;
|
||||
}
|
||||
|
||||
DECODER_RENDERER_CALLBACKS video_decoder_callbacks = {
|
||||
.setup = video_decoder_setup,
|
||||
.cleanup = video_decoder_cleanup,
|
||||
.submitDecodeUnit = video_decoder_submit_decode_unit,
|
||||
.capabilities = CAPABILITY_SLICES_PER_FRAME(4) | CAPABILITY_REFERENCE_FRAME_INVALIDATION_AVC | CAPABILITY_REFERENCE_FRAME_INVALIDATION_HEVC | CAPABILITY_DIRECT_SUBMIT,
|
||||
};
|
|
@@ -1,8 +0,0 @@
|
|||
#include "Limelight.h"
|
||||
#include <libavcodec/avcodec.h>
|
||||
#include <pthread.h>
|
||||
|
||||
extern volatile int frame_is_ready;
|
||||
extern pthread_mutex_t mutex;
|
||||
extern AVFrame* frame;
|
||||
extern DECODER_RENDERER_CALLBACKS video_decoder_callbacks;
|
|
@@ -5,7 +5,6 @@
|
|||
#include "Application.hpp"
|
||||
#include "Settings.hpp"
|
||||
#include "Limelight.h"
|
||||
#include "gl_render.h"
|
||||
#include "libretro.h"
|
||||
#include "InputController.hpp"
|
||||
|
||||
|
@@ -66,8 +65,6 @@ int main(int argc, const char * argv[]) {
|
|||
rglgen_resolve_symbols(glfwGetProcAddress);
|
||||
glfwSwapInterval(1);
|
||||
|
||||
gl_render_init();
|
||||
|
||||
glfwSetCursorPosCallback(window, [](GLFWwindow *w, double x, double y) {
|
||||
mouse_x = x;
|
||||
mouse_y = y;
|
||||
|
|
|
@@ -8,7 +8,6 @@
|
|||
|
||||
#include "glsym/glsym.h"
|
||||
#include "libretro.h"
|
||||
#include "gl_render.h"
|
||||
#include "Application.hpp"
|
||||
#include "InputController.hpp"
|
||||
#include "Settings.hpp"
|
||||
|
@@ -58,8 +57,6 @@ void moonlight_init(int width, int height) {
|
|||
|
||||
moonlight_is_initialized = true;
|
||||
|
||||
gl_render_init();
|
||||
|
||||
nanogui::init();
|
||||
app = new Application(Size(width, height), Size(width, height));
|
||||
|
||||
|
|
246
src/streaming/MoonlightSession.cpp
Normal file
|
@@ -0,0 +1,246 @@
|
|||
#include "MoonlightSession.hpp"
|
||||
#include "GameStreamClient.hpp"
|
||||
#include "Settings.hpp"
|
||||
#include "Log.h"
|
||||
#include <nanogui/nanogui.h>
|
||||
|
||||
static MoonlightSession* m_active_session = nullptr;
|
||||
|
||||
MoonlightSession::MoonlightSession(const std::string &address, int app_id) {
|
||||
m_address = address;
|
||||
m_app_id = app_id;
|
||||
|
||||
m_active_session = this;
|
||||
}
|
||||
|
||||
MoonlightSession::~MoonlightSession() {
|
||||
if (m_video_decoder) {
|
||||
delete m_video_decoder;
|
||||
}
|
||||
|
||||
if (m_video_renderer) {
|
||||
delete m_video_renderer;
|
||||
}
|
||||
|
||||
if (m_audio_renderer) {
|
||||
delete m_audio_renderer;
|
||||
}
|
||||
|
||||
m_active_session = nullptr;
|
||||
}
|
||||
|
||||
// MARK: Connection callbacks
|
||||
|
||||
static const char* stages[] = {
|
||||
"STAGE_NONE",
|
||||
"STAGE_PLATFORM_INIT",
|
||||
"STAGE_NAME_RESOLUTION",
|
||||
"STAGE_RTSP_HANDSHAKE",
|
||||
"STAGE_CONTROL_STREAM_INIT",
|
||||
"STAGE_VIDEO_STREAM_INIT",
|
||||
"STAGE_AUDIO_STREAM_INIT",
|
||||
"STAGE_INPUT_STREAM_INIT",
|
||||
"STAGE_CONTROL_STREAM_START",
|
||||
"STAGE_VIDEO_STREAM_START",
|
||||
"STAGE_AUDIO_STREAM_START",
|
||||
"STAGE_INPUT_STREAM_START"
|
||||
};
|
||||
|
||||
void MoonlightSession::connection_stage_starting(int stage) {
|
||||
//LOG_FMT("Starting: %s\n", stages[stage]);
|
||||
}
|
||||
|
||||
void MoonlightSession::connection_stage_complete(int stage) {
|
||||
//LOG_FMT("Complete: %s\n", stages[stage]);
|
||||
}
|
||||
|
||||
void MoonlightSession::connection_stage_failed(int stage, int error_code) {
|
||||
//LOG_FMT("Failed: %s\n", stages[stage]);
|
||||
}
|
||||
|
||||
void MoonlightSession::connection_started() {
|
||||
//LOG("Connection started\n");
|
||||
}
|
||||
|
||||
void MoonlightSession::connection_terminated(int error_code) {
|
||||
//LOG("Connection terminated...\n");
|
||||
|
||||
if (m_active_session) {
|
||||
m_active_session->m_is_active = false;
|
||||
}
|
||||
}
|
||||
|
||||
void MoonlightSession::connection_log_message(const char* format, ...) {
|
||||
va_list list;
|
||||
va_start(list, format);
|
||||
vprintf(format, list);
|
||||
va_end(list);
|
||||
}
|
||||
|
||||
void MoonlightSession::connection_rumble(unsigned short controller, unsigned short low_freq_motor, unsigned short high_freq_motor) {
|
||||
|
||||
}
|
||||
|
||||
void MoonlightSession::connection_status_update(int connection_status) {
|
||||
|
||||
}
|
||||
|
||||
// MARK: Video decoder callbacks
|
||||
|
||||
int MoonlightSession::video_decoder_setup(int video_format, int width, int height, int redraw_rate, void* context, int dr_flags) {
|
||||
if (m_active_session && m_active_session->m_video_decoder) {
|
||||
return m_active_session->m_video_decoder->setup(video_format, width, height, redraw_rate, context, dr_flags);
|
||||
}
|
||||
return DR_OK;
|
||||
}
|
||||
|
||||
void MoonlightSession::video_decoder_start() {
|
||||
if (m_active_session && m_active_session->m_video_decoder) {
|
||||
m_active_session->m_video_decoder->start();
|
||||
}
|
||||
}
|
||||
|
||||
void MoonlightSession::video_decoder_stop() {
|
||||
if (m_active_session && m_active_session->m_video_decoder) {
|
||||
m_active_session->m_video_decoder->stop();
|
||||
}
|
||||
}
|
||||
|
||||
void MoonlightSession::video_decoder_cleanup() {
|
||||
if (m_active_session && m_active_session->m_video_decoder) {
|
||||
m_active_session->m_video_decoder->cleanup();
|
||||
}
|
||||
}
|
||||
|
||||
int MoonlightSession::video_decoder_submit_decode_unit(PDECODE_UNIT decode_unit) {
|
||||
if (m_active_session && m_active_session->m_video_decoder) {
|
||||
return m_active_session->m_video_decoder->submit_decode_unit(decode_unit);
|
||||
}
|
||||
return DR_OK;
|
||||
}
|
||||
|
||||
// MARK: Audio callbacks
|
||||
|
||||
int MoonlightSession::audio_renderer_init(int audio_configuration, const POPUS_MULTISTREAM_CONFIGURATION opus_config, void* context, int ar_flags) {
|
||||
if (m_active_session && m_active_session->m_audio_renderer) {
|
||||
return m_active_session->m_audio_renderer->init(audio_configuration, opus_config, context, ar_flags);
|
||||
}
|
||||
return DR_OK;
|
||||
}
|
||||
|
||||
void MoonlightSession::audio_renderer_start() {
|
||||
if (m_active_session && m_active_session->m_audio_renderer) {
|
||||
m_active_session->m_audio_renderer->start();
|
||||
}
|
||||
}
|
||||
|
||||
void MoonlightSession::audio_renderer_stop() {
|
||||
if (m_active_session && m_active_session->m_audio_renderer) {
|
||||
m_active_session->m_audio_renderer->stop();
|
||||
}
|
||||
}
|
||||
|
||||
void MoonlightSession::audio_renderer_cleanup() {
|
||||
if (m_active_session && m_active_session->m_audio_renderer) {
|
||||
m_active_session->m_audio_renderer->cleanup();
|
||||
}
|
||||
}
|
||||
|
||||
void MoonlightSession::audio_renderer_decode_and_play_sample(char* sample_data, int sample_length) {
|
||||
if (m_active_session && m_active_session->m_audio_renderer) {
|
||||
m_active_session->m_audio_renderer->decode_and_play_sample(sample_data, sample_length);
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: MoonlightSession
|
||||
|
||||
void MoonlightSession::start(std::function<void(bool)> callback) {
|
||||
LiInitializeStreamConfiguration(&m_config);
|
||||
|
||||
int h = Settings::settings()->resolution();
|
||||
int w = h * 16 / 9;
|
||||
m_config.width = w;
|
||||
m_config.height = h;
|
||||
m_config.fps = Settings::settings()->fps();
|
||||
m_config.audioConfiguration = AUDIO_CONFIGURATION_STEREO;
|
||||
m_config.packetSize = 1392;
|
||||
m_config.streamingRemotely = STREAM_CFG_LOCAL;
|
||||
m_config.bitrate = Settings::settings()->bitrate();
|
||||
|
||||
switch (Settings::settings()->video_codec()) {
|
||||
case H264:
|
||||
m_config.supportsHevc = 0;
|
||||
break;
|
||||
case H265:
|
||||
m_config.supportsHevc = 1;
|
||||
m_config.hevcBitratePercentageMultiplier = 75;
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
LiInitializeConnectionCallbacks(&m_connection_callbacks);
|
||||
m_connection_callbacks.stageStarting = connection_stage_starting;
|
||||
m_connection_callbacks.stageComplete = connection_stage_complete;
|
||||
m_connection_callbacks.stageFailed = connection_stage_failed;
|
||||
m_connection_callbacks.connectionStarted = connection_started;
|
||||
m_connection_callbacks.connectionTerminated = connection_terminated;
|
||||
m_connection_callbacks.logMessage = connection_log_message;
|
||||
m_connection_callbacks.rumble = connection_rumble;
|
||||
m_connection_callbacks.connectionStatusUpdate = connection_status_update;
|
||||
|
||||
LiInitializeVideoCallbacks(&m_video_callbacks);
|
||||
m_video_callbacks.setup = video_decoder_setup;
|
||||
m_video_callbacks.start = video_decoder_start;
|
||||
m_video_callbacks.stop = video_decoder_stop;
|
||||
m_video_callbacks.cleanup = video_decoder_cleanup;
|
||||
m_video_callbacks.submitDecodeUnit = video_decoder_submit_decode_unit;
|
||||
|
||||
if (m_video_decoder) {
|
||||
m_video_callbacks.capabilities = m_video_decoder->capabilities();
|
||||
}
|
||||
|
||||
LiInitializeAudioCallbacks(&m_audio_callbacks);
|
||||
m_audio_callbacks.init = audio_renderer_init;
|
||||
m_audio_callbacks.start = audio_renderer_start;
|
||||
m_audio_callbacks.stop = audio_renderer_stop;
|
||||
m_audio_callbacks.cleanup = audio_renderer_cleanup;
|
||||
m_audio_callbacks.decodeAndPlaySample = audio_renderer_decode_and_play_sample;
|
||||
|
||||
if (m_audio_renderer) {
|
||||
m_audio_callbacks.capabilities = m_audio_renderer->capabilities();
|
||||
}
|
||||
|
||||
GameStreamClient::client()->start(m_address, m_config, m_app_id, [this, callback](auto result) {
|
||||
if (result.isSuccess()) {
|
||||
m_config = result.value();
|
||||
|
||||
perform_async([this, callback] {
|
||||
auto m_data = GameStreamClient::client()->server_data(m_address);
|
||||
LiStartConnection(&m_data.serverInfo, &m_config, &m_connection_callbacks, &m_video_callbacks, &m_audio_callbacks, NULL, 0, NULL, 0);
|
||||
|
||||
nanogui::async([this, callback] {
|
||||
callback(true);
|
||||
});
|
||||
});
|
||||
} else {
|
||||
callback(false);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
void MoonlightSession::stop(int terminate_app) {
|
||||
if (terminate_app) {
|
||||
GameStreamClient::client()->quit(m_address, [](auto _) {});
|
||||
}
|
||||
|
||||
LiStopConnection();
|
||||
}
|
||||
|
||||
void MoonlightSession::draw() {
|
||||
if (m_video_decoder && m_video_renderer) {
|
||||
if (auto frame = m_video_decoder->frame()) {
|
||||
m_video_renderer->draw(m_config.width, m_config.height, frame);
|
||||
}
|
||||
}
|
||||
}
|
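Two details worth calling out in the new session class above: C-style callbacks from moonlight-common-c cannot carry instance state, so MoonlightSession keeps a static m_active_session pointer and forwards each callback to the attached decoder/renderer; and Settings stores only the vertical resolution, with the width derived on the assumption of a 16:9 stream:

    // In MoonlightSession::start() above (Settings.hpp provides the singleton):
    int height = Settings::settings()->resolution(); // the stored setting is the height only
    int width  = height * 16 / 9;                    // 720 -> 1280, 1080 -> 1920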
67
src/streaming/MoonlightSession.hpp
Normal file
|
@@ -0,0 +1,67 @@
|
|||
#include "GameStreamClient.hpp"
|
||||
#include "IAudioRenderer.hpp"
|
||||
#include "IVideoRenderer.hpp"
|
||||
#include "IFFmpegVideoDecoder.hpp"
|
||||
#pragma once
|
||||
|
||||
class MoonlightSession {
|
||||
public:
|
||||
MoonlightSession(const std::string &address, int app_id);
|
||||
~MoonlightSession();
|
||||
|
||||
void set_video_decoder(IFFmpegVideoDecoder* video_decoder) {
|
||||
m_video_decoder = video_decoder;
|
||||
}
|
||||
|
||||
void set_video_renderer(IVideoRenderer* video_renderer) {
|
||||
m_video_renderer = video_renderer;
|
||||
}
|
||||
|
||||
void set_audio_renderer(IAudioRenderer* audio_renderer) {
|
||||
m_audio_renderer = audio_renderer;
|
||||
}
|
||||
|
||||
void start(std::function<void(bool)> callback);
|
||||
void stop(int terminate_app);
|
||||
|
||||
void draw();
|
||||
|
||||
bool is_active() const {
|
||||
return m_is_active;
|
||||
}
|
||||
|
||||
private:
|
||||
static void connection_stage_starting(int);
|
||||
static void connection_stage_complete(int);
|
||||
static void connection_stage_failed(int, int);
|
||||
static void connection_started();
|
||||
static void connection_terminated(int);
|
||||
static void connection_log_message(const char* format, ...);
|
||||
static void connection_rumble(unsigned short, unsigned short, unsigned short);
|
||||
static void connection_status_update(int);
|
||||
|
||||
static int video_decoder_setup(int, int, int, int, void*, int);
|
||||
static void video_decoder_start();
|
||||
static void video_decoder_stop();
|
||||
static void video_decoder_cleanup();
|
||||
static int video_decoder_submit_decode_unit(PDECODE_UNIT);
|
||||
|
||||
static int audio_renderer_init(int, const POPUS_MULTISTREAM_CONFIGURATION, void*, int);
|
||||
static void audio_renderer_start();
|
||||
static void audio_renderer_stop();
|
||||
static void audio_renderer_cleanup();
|
||||
static void audio_renderer_decode_and_play_sample(char*, int);
|
||||
|
||||
std::string m_address;
|
||||
int m_app_id;
|
||||
STREAM_CONFIGURATION m_config;
|
||||
CONNECTION_LISTENER_CALLBACKS m_connection_callbacks;
|
||||
DECODER_RENDERER_CALLBACKS m_video_callbacks;
|
||||
AUDIO_RENDERER_CALLBACKS m_audio_callbacks;
|
||||
|
||||
IFFmpegVideoDecoder* m_video_decoder = nullptr;
|
||||
IVideoRenderer* m_video_renderer = nullptr;
|
||||
IAudioRenderer* m_audio_renderer = nullptr;
|
||||
|
||||
bool m_is_active = true;
|
||||
};
|
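A hedged sketch of how a caller (presumably StreamWindow) is expected to wire up the new session class. This is not code from the commit; it assumes GLVideoRenderer is default-constructible, and relies only on the setters, start/stop/draw, and the ownership rule visible in the destructor above (the session deletes the decoder and renderers it was given):

    #include <string>
    #include "MoonlightSession.hpp"
    #include "FFmpegVideoDecoder.hpp"
    #include "GLVideoRenderer.hpp"
    #include "RetroAudioRenderer.hpp"

    static MoonlightSession* session = nullptr;

    void start_stream(const std::string& address, int app_id) {
        session = new MoonlightSession(address, app_id);

        // The session deletes these in its destructor, so it takes ownership.
        session->set_video_decoder(new FFmpegVideoDecoder());
        session->set_video_renderer(new GLVideoRenderer());
        session->set_audio_renderer(new RetroAudioRenderer());

        session->start([](bool ok) {
            if (!ok) {
                // connection failed; tear the stream window down and report the error
            }
        });
    }

    void render_frame() {
        if (session && session->is_active()) {
            session->draw(); // draws the most recently decoded frame, if any
        }
    }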
13
src/streaming/audio/IAudioRenderer.hpp
Normal file
|
@@ -0,0 +1,13 @@
|
|||
#include <Limelight.h>
|
||||
#pragma once
|
||||
|
||||
class IAudioRenderer {
|
||||
public:
|
||||
virtual ~IAudioRenderer() {};
|
||||
virtual int init(int audio_configuration, const POPUS_MULTISTREAM_CONFIGURATION opus_config, void* context, int ar_flags) = 0;
|
||||
virtual void start() {};
|
||||
virtual void stop() {};
|
||||
virtual void cleanup() = 0;
|
||||
virtual void decode_and_play_sample(char* sample_data, int sample_length) = 0;
|
||||
virtual int capabilities() = 0;
|
||||
};
|
44
src/streaming/audio/RetroAudioRenderer.cpp
Normal file
|
@@ -0,0 +1,44 @@
|
|||
#include "RetroAudioRenderer.hpp"
|
||||
#include "libretro.h"
|
||||
#include <cstdlib>
|
||||
|
||||
#define MAX_CHANNEL_COUNT 6
|
||||
#define FRAME_SIZE 240
|
||||
|
||||
extern retro_audio_sample_batch_t audio_batch_cb;
|
||||
|
||||
RetroAudioRenderer::~RetroAudioRenderer() {
|
||||
cleanup();
|
||||
}
|
||||
|
||||
int RetroAudioRenderer::init(int audio_configuration, const POPUS_MULTISTREAM_CONFIGURATION opus_config, void *context, int ar_flags) {
|
||||
int error;
|
||||
m_decoder = opus_multistream_decoder_create(opus_config->sampleRate, opus_config->channelCount, opus_config->streams, opus_config->coupledStreams, opus_config->mapping, &error);
|
||||
m_buffer = (short *)malloc(FRAME_SIZE * MAX_CHANNEL_COUNT * sizeof(short));
|
||||
return DR_OK;
|
||||
}
|
||||
|
||||
void RetroAudioRenderer::cleanup() {
|
||||
if (m_decoder) {
|
||||
opus_multistream_decoder_destroy(m_decoder);
|
||||
m_decoder = nullptr;
|
||||
}
|
||||
|
||||
if (m_buffer) {
|
||||
free(m_buffer);
|
||||
m_buffer = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
void RetroAudioRenderer::decode_and_play_sample(char *data, int length) {
|
||||
if (audio_batch_cb) {
|
||||
int decode_len = opus_multistream_decode(m_decoder, (const unsigned char *)data, length, m_buffer, FRAME_SIZE, 0);
|
||||
if (decode_len > 0) {
|
||||
audio_batch_cb(m_buffer, decode_len);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
int RetroAudioRenderer::capabilities() {
|
||||
return CAPABILITY_DIRECT_SUBMIT;
|
||||
}
|
18
src/streaming/audio/RetroAudioRenderer.hpp
Normal file
|
@@ -0,0 +1,18 @@
|
|||
#include "IAudioRenderer.hpp"
|
||||
#include <opus/opus_multistream.h>
|
||||
#pragma once
|
||||
|
||||
class RetroAudioRenderer: public IAudioRenderer {
|
||||
public:
|
||||
RetroAudioRenderer() {};
|
||||
~RetroAudioRenderer();
|
||||
|
||||
int init(int audio_configuration, const POPUS_MULTISTREAM_CONFIGURATION opus_config, void *context, int ar_flags) override;
|
||||
void cleanup() override;
|
||||
void decode_and_play_sample(char *sample_data, int sample_length) override;
|
||||
int capabilities() override;
|
||||
|
||||
private:
|
||||
OpusMSDecoder* m_decoder = nullptr;
|
||||
short* m_buffer = nullptr;
|
||||
};
|
195
src/streaming/ffmpeg/FFmpegVideoDecoder.cpp
Normal file
|
@@ -0,0 +1,195 @@
|
|||
#include "FFmpegVideoDecoder.hpp"
|
||||
#include "Log.h"
|
||||
|
||||
// Disables the deblocking filter at the cost of image quality
|
||||
#define DISABLE_LOOP_FILTER 0x1
|
||||
// Uses the low latency decode flag (disables multithreading)
|
||||
#define LOW_LATENCY_DECODE 0x2
|
||||
// Threads process each slice, rather than each frame
|
||||
#define SLICE_THREADING 0x4
|
||||
// Uses nonstandard speedup tricks
|
||||
#define FAST_DECODE 0x8
|
||||
// Uses bilinear filtering instead of bicubic
|
||||
#define BILINEAR_FILTERING 0x10
|
||||
// Uses a faster bilinear filtering with lower image quality
|
||||
#define FAST_BILINEAR_FILTERING 0x20
|
||||
|
||||
#define DECODER_BUFFER_SIZE 92 * 1024
|
||||
|
||||
FFmpegVideoDecoder::FFmpegVideoDecoder(IFFmpegHardwareVideoDecoder* hardware_video_decoder) {
|
||||
m_hardware_video_decoder = hardware_video_decoder;
|
||||
|
||||
pthread_mutex_init(&m_mutex, NULL);
|
||||
}
|
||||
|
||||
FFmpegVideoDecoder::~FFmpegVideoDecoder() {
|
||||
pthread_mutex_destroy(&m_mutex);
|
||||
cleanup();
|
||||
|
||||
if (m_hardware_video_decoder) {
|
||||
delete m_hardware_video_decoder;
|
||||
}
|
||||
}
|
||||
|
||||
int FFmpegVideoDecoder::setup(int video_format, int width, int height, int redraw_rate, void *context, int dr_flags) {
|
||||
av_log_set_level(AV_LOG_QUIET);
|
||||
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(58,10,100)
|
||||
avcodec_register_all();
|
||||
#endif
|
||||
|
||||
av_init_packet(&m_packet);
|
||||
|
||||
int perf_lvl = SLICE_THREADING;
|
||||
|
||||
//ffmpeg_decoder = perf_lvl & VAAPI_ACCELERATION ? VAAPI : SOFTWARE;
|
||||
switch (video_format) {
|
||||
case VIDEO_FORMAT_H264:
|
||||
m_decoder = avcodec_find_decoder_by_name("h264");
|
||||
break;
|
||||
case VIDEO_FORMAT_H265:
|
||||
m_decoder = avcodec_find_decoder_by_name("hevc");
|
||||
break;
|
||||
}
|
||||
|
||||
if (m_decoder == NULL) {
|
||||
LOG("Couldn't find decoder\n");
|
||||
return -1;
|
||||
}
|
||||
|
||||
m_decoder_context = avcodec_alloc_context3(m_decoder);
|
||||
if (m_decoder_context == NULL) {
|
||||
LOG("Couldn't allocate context\n");
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (perf_lvl & DISABLE_LOOP_FILTER)
|
||||
// Skip the loop filter for performance reasons
|
||||
m_decoder_context->skip_loop_filter = AVDISCARD_ALL;
|
||||
|
||||
if (perf_lvl & LOW_LATENCY_DECODE)
|
||||
// Use low delay single threaded encoding
|
||||
m_decoder_context->flags |= AV_CODEC_FLAG_LOW_DELAY;
|
||||
|
||||
if (perf_lvl & SLICE_THREADING)
|
||||
m_decoder_context->thread_type = FF_THREAD_SLICE;
|
||||
else
|
||||
m_decoder_context->thread_type = FF_THREAD_FRAME;
|
||||
|
||||
m_decoder_context->thread_count = 2;
|
||||
|
||||
m_decoder_context->width = width;
|
||||
m_decoder_context->height = height;
|
||||
m_decoder_context->pix_fmt = AV_PIX_FMT_YUV420P;
|
||||
|
||||
int err = avcodec_open2(m_decoder_context, m_decoder, NULL);
|
||||
if (err < 0) {
|
||||
LOG("Couldn't open codec\n");
|
||||
return err;
|
||||
}
|
||||
|
||||
m_frames_count = 2;
|
||||
m_frames = (AVFrame**)malloc(m_frames_count * sizeof(AVFrame*));
|
||||
if (m_frames == NULL) {
|
||||
LOG("Couldn't allocate frames\n");
|
||||
return -1;
|
||||
}
|
||||
|
||||
for (int i = 0; i < m_frames_count; i++) {
|
||||
m_frames[i] = av_frame_alloc();
|
||||
if (m_frames[i] == NULL) {
|
||||
LOG("Couldn't allocate frame\n");
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
m_ffmpeg_buffer = (char*)malloc(DECODER_BUFFER_SIZE + AV_INPUT_BUFFER_PADDING_SIZE);
|
||||
if (m_ffmpeg_buffer == NULL) {
|
||||
LOG("Not enough memory\n");
|
||||
cleanup();
|
||||
return -1;
|
||||
}
|
||||
|
||||
return DR_OK;
|
||||
}
|
||||
|
||||
void FFmpegVideoDecoder::cleanup() {
|
||||
if (m_decoder_context) {
|
||||
avcodec_close(m_decoder_context);
|
||||
av_free(m_decoder_context);
|
||||
m_decoder_context = NULL;
|
||||
}
|
||||
|
||||
if (m_frames) {
|
||||
for (int i = 0; i < m_frames_count; i++) {
|
||||
if (m_frames[i])
|
||||
av_frame_free(&m_frames[i]);
|
||||
}
|
||||
|
||||
free(m_frames);
|
||||
m_frames = nullptr;
|
||||
}
|
||||
|
||||
if (m_ffmpeg_buffer) {
|
||||
free(m_ffmpeg_buffer);
|
||||
m_ffmpeg_buffer = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
int FFmpegVideoDecoder::submit_decode_unit(PDECODE_UNIT decode_unit) {
|
||||
if (decode_unit->fullLength < DECODER_BUFFER_SIZE) {
|
||||
PLENTRY entry = decode_unit->bufferList;
|
||||
int length = 0;
|
||||
while (entry != NULL) {
|
||||
memcpy(m_ffmpeg_buffer + length, entry->data, entry->length);
|
||||
length += entry->length;
|
||||
entry = entry->next;
|
||||
}
|
||||
decode(m_ffmpeg_buffer, length);
|
||||
|
||||
if (pthread_mutex_lock(&m_mutex) == 0) {
|
||||
m_frame = get_frame(true);
|
||||
|
||||
// Push event!!
|
||||
pthread_mutex_unlock(&m_mutex);
|
||||
}
|
||||
}
|
||||
return DR_OK;
|
||||
}
|
||||
|
||||
int FFmpegVideoDecoder::capabilities() const {
|
||||
return CAPABILITY_SLICES_PER_FRAME(4) | CAPABILITY_REFERENCE_FRAME_INVALIDATION_AVC | CAPABILITY_REFERENCE_FRAME_INVALIDATION_HEVC | CAPABILITY_DIRECT_SUBMIT;
|
||||
}

int FFmpegVideoDecoder::decode(char* indata, int inlen) {
    m_packet.data = (uint8_t *)indata;
    m_packet.size = inlen;

    int err = avcodec_send_packet(m_decoder_context, &m_packet);
    if (err < 0) {
        char error[512];
        av_strerror(err, error, sizeof(error));
        LOG_FMT("Decode failed - %s\n", error);
    }

    return err < 0 ? err : 0;
}

AVFrame* FFmpegVideoDecoder::get_frame(bool native_frame) {
    int err = avcodec_receive_frame(m_decoder_context, m_frames[m_next_frame]);
    if (err == 0) {
        m_current_frame = m_next_frame;
        m_next_frame = (m_current_frame + 1) % m_frames_count;

        if (/*ffmpeg_decoder == SOFTWARE ||*/ native_frame)
            return m_frames[m_current_frame];
    } else if (err != AVERROR(EAGAIN)) {
        char error[512];
        av_strerror(err, error, sizeof(error));
        LOG_FMT("Receive failed - %d/%s\n", err, error);
    }
    return NULL;
}

AVFrame* FFmpegVideoDecoder::frame() const {
    return m_frame;
}
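
For orientation, a minimal sketch of how a caller might drive this decoder (hypothetical code, not part of this commit; drive_decoder and du are illustrative names, VIDEO_FORMAT_H264 and DR_OK come from Limelight.h):

// Hypothetical driver: set up once, then feed decode units and pull the latest frame.
static FFmpegVideoDecoder s_decoder;

int drive_decoder(PDECODE_UNIT du, int width, int height, int fps) {
    static bool ready = false;
    if (!ready) {
        // setup() allocates the codec context, the small AVFrame pool and the packet buffer.
        if (s_decoder.setup(VIDEO_FORMAT_H264, width, height, fps, nullptr, 0) != DR_OK)
            return -1;
        ready = true;
    }
    int status = s_decoder.submit_decode_unit(du);  // copies bufferList into one contiguous packet
    if (AVFrame* picture = s_decoder.frame()) {     // most recent decoded picture, may lag by a frame
        (void)picture;                              // hand off to a renderer here
    }
    return status;
}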

39 src/streaming/ffmpeg/FFmpegVideoDecoder.hpp Normal file
@@ -0,0 +1,39 @@
#include "IFFmpegVideoDecoder.hpp"
|
||||
#include <pthread.h>
|
||||
#pragma once
|
||||
|
||||
class IFFmpegHardwareVideoDecoder {
|
||||
public:
|
||||
virtual ~IFFmpegHardwareVideoDecoder() {};
|
||||
virtual bool prepare_decoder_context(AVCodecContext* context, AVDictionary** options) = 0;
|
||||
};
|
||||
|
||||
class FFmpegVideoDecoder: public IFFmpegVideoDecoder {
|
||||
public:
|
||||
FFmpegVideoDecoder(IFFmpegHardwareVideoDecoder* hardware_video_decoder = nullptr);
|
||||
~FFmpegVideoDecoder();
|
||||
|
||||
int setup(int video_format, int width, int height, int redraw_rate, void *context, int dr_flags) override;
|
||||
void cleanup() override;
|
||||
int submit_decode_unit(PDECODE_UNIT decode_unit) override;
|
||||
int capabilities() const override;
|
||||
AVFrame* frame() const override;
|
||||
|
||||
private:
|
||||
int decode(char* indata, int inlen);
|
||||
AVFrame* get_frame(bool native_frame);
|
||||
|
||||
IFFmpegHardwareVideoDecoder* m_hardware_video_decoder;
|
||||
|
||||
AVPacket m_packet;
|
||||
AVCodec* m_decoder = nullptr;
|
||||
AVCodecContext* m_decoder_context = nullptr;
|
||||
AVFrame** m_frames = nullptr;
|
||||
|
||||
int m_frames_count;
|
||||
int m_current_frame = 0, m_next_frame = 0;
|
||||
|
||||
char* m_ffmpeg_buffer = nullptr;
|
||||
AVFrame* m_frame = nullptr;
|
||||
pthread_mutex_t m_mutex;
|
||||
};
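
IFFmpegHardwareVideoDecoder is the hook a platform backend can implement to prepare the codec context, for example to attach a hardware device. One possible implementation sketch (hypothetical, not part of this commit; assumes FFmpeg's hwcontext API and a platform that exposes VAAPI):

// Hypothetical backend: attach a VAAPI device to the context inside prepare_decoder_context().
extern "C" {
#include <libavutil/hwcontext.h>
}

class VaapiHardwareVideoDecoder: public IFFmpegHardwareVideoDecoder {
public:
    bool prepare_decoder_context(AVCodecContext* context, AVDictionary** options) override {
        (void)options;                       // no codec-private options in this sketch
        AVBufferRef* device = nullptr;
        if (av_hwdevice_ctx_create(&device, AV_HWDEVICE_TYPE_VAAPI, nullptr, nullptr, 0) < 0)
            return false;                    // no usable hardware device
        context->hw_device_ctx = av_buffer_ref(device);  // the context keeps its own reference
        av_buffer_unref(&device);
        return true;
    }
};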

18 src/streaming/ffmpeg/IFFmpegVideoDecoder.hpp Normal file
@@ -0,0 +1,18 @@
#include <Limelight.h>
#pragma once

extern "C" {
#include <libavcodec/avcodec.h>
}

class IFFmpegVideoDecoder {
public:
    virtual ~IFFmpegVideoDecoder() {};
    virtual int setup(int video_format, int width, int height, int redraw_rate, void* context, int dr_flags) = 0;
    virtual void start() {};
    virtual void stop() {};
    virtual void cleanup() = 0;
    virtual int submit_decode_unit(PDECODE_UNIT decode_unit) = 0;
    virtual int capabilities() const = 0;
    virtual AVFrame* frame() const = 0;
};

197 src/streaming/video/GLVideoRenderer.cpp Normal file
@@ -0,0 +1,197 @@
#include "GLVideoRenderer.hpp"
|
||||
|
||||
static const char *vertex_shader_string = "\
|
||||
#version 140\n\
|
||||
in vec2 position;\n\
|
||||
out mediump vec2 tex_position;\n\
|
||||
\
|
||||
void main() {\n\
|
||||
gl_Position = vec4(position, 1, 1);\n\
|
||||
tex_position = vec2((position.x + 1.0) / 2.0, (1.0 - position.y) / 2.0);\n\
|
||||
}";
|
||||
|
||||
static const char *fragment_shader_string = "\
|
||||
#version 140\n\
|
||||
uniform lowp sampler2D ymap;\n\
|
||||
uniform lowp sampler2D umap;\n\
|
||||
uniform lowp sampler2D vmap;\n\
|
||||
uniform mat3 yuvmat;\n\
|
||||
uniform vec3 offset;\n\
|
||||
in mediump vec2 tex_position;\n\
|
||||
out vec4 FragColor;\n\
|
||||
\
|
||||
void main() {\n\
|
||||
vec3 YCbCr = vec3(\n\
|
||||
texture(ymap, tex_position).r,\n\
|
||||
texture(umap, tex_position).r - 0.0,\n\
|
||||
texture(vmap, tex_position).r - 0.0\n\
|
||||
);\n\
|
||||
YCbCr -= offset;\n\
|
||||
FragColor = vec4(clamp(yuvmat * YCbCr, 0.0, 1.0), 1.0);\n\
|
||||
}";
|
||||
|
||||
static const float vertices[] = {
|
||||
-1.f, -1.f,
|
||||
1.f, -1.f,
|
||||
-1.f, 1.f,
|
||||
1.f, 1.f
|
||||
};
|
||||
|
||||
static const char* texture_mappings[] = { "ymap", "umap", "vmap" };
|
||||
|
||||
static const float* gl_color_offset(bool color_full) {
|
||||
static const float limitedOffsets[] = { 16.0f / 255.0f, 128.0f / 255.0f, 128.0f / 255.0f };
|
||||
static const float fullOffsets[] = { 0.0f, 128.0f / 255.0f, 128.0f / 255.0f };
|
||||
return color_full ? fullOffsets : limitedOffsets;
|
||||
}

static const float* gl_color_matrix(enum AVColorSpace color_space, bool color_full) {
    static const float bt601Lim[] = {
        1.1644f, 1.1644f, 1.1644f,
        0.0f, -0.3917f, 2.0172f,
        1.5960f, -0.8129f, 0.0f
    };
    static const float bt601Full[] = {
        1.0f, 1.0f, 1.0f,
        0.0f, -0.3441f, 1.7720f,
        1.4020f, -0.7141f, 0.0f
    };
    static const float bt709Lim[] = {
        1.1644f, 1.1644f, 1.1644f,
        0.0f, -0.2132f, 2.1124f,
        1.7927f, -0.5329f, 0.0f
    };
    static const float bt709Full[] = {
        1.0f, 1.0f, 1.0f,
        0.0f, -0.1873f, 1.8556f,
        1.5748f, -0.4681f, 0.0f
    };
    static const float bt2020Lim[] = {
        1.1644f, 1.1644f, 1.1644f,
        0.0f, -0.1874f, 2.1418f,
        1.6781f, -0.6505f, 0.0f
    };
    static const float bt2020Full[] = {
        1.0f, 1.0f, 1.0f,
        0.0f, -0.1646f, 1.8814f,
        1.4746f, -0.5714f, 0.0f
    };

    switch (color_space) {
        case AVCOL_SPC_SMPTE170M:
        case AVCOL_SPC_BT470BG:
            return color_full ? bt601Full : bt601Lim;
        case AVCOL_SPC_BT709:
            return color_full ? bt709Full : bt709Lim;
        case AVCOL_SPC_BT2020_NCL:
        case AVCOL_SPC_BT2020_CL:
            return color_full ? bt2020Full : bt2020Lim;
        default:
            return bt601Lim;
    }
}
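
These arrays are uploaded with transpose = GL_FALSE, so each group of three floats becomes one column of the GLSL mat3, and yuvmat * YCbCr in the fragment shader expands to the familiar BT.601/709/2020 equations. A CPU-side sanity check for BT.601 limited range (illustrative only, not part of this commit):

// Illustrative expansion of yuvmat * (YCbCr - offset) for BT.601 limited range,
// using the same constants as gl_color_matrix() and gl_color_offset() above.
#include <algorithm>
#include <cstdio>

static void ycbcr601_limited_to_rgb(float y, float cb, float cr) {
    y  -= 16.0f / 255.0f;                 // limited-range offsets, as in gl_color_offset(false)
    cb -= 128.0f / 255.0f;
    cr -= 128.0f / 255.0f;
    float r = 1.1644f * y + 0.0f    * cb + 1.5960f * cr;
    float g = 1.1644f * y - 0.3917f * cb - 0.8129f * cr;
    float b = 1.1644f * y + 2.0172f * cb + 0.0f    * cr;
    auto clamp01 = [](float v) { return std::min(std::max(v, 0.0f), 1.0f); };
    printf("RGB = %.3f %.3f %.3f\n", clamp01(r), clamp01(g), clamp01(b));
}
// ycbcr601_limited_to_rgb(235.0f / 255.0f, 128.0f / 255.0f, 128.0f / 255.0f)
// prints roughly 1.000 1.000 1.000, i.e. reference white.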

GLVideoRenderer::~GLVideoRenderer() {
    if (m_shader_program) {
        glDeleteProgram(m_shader_program);
    }

    if (m_vbo) {
        glDeleteBuffers(1, &m_vbo);
    }

    if (m_vao) {
        glDeleteVertexArrays(1, &m_vao);
    }

    for (int i = 0; i < 3; i++) {
        if (m_texture_id[i]) {
            glDeleteTextures(1, &m_texture_id[i]);
        }
    }
}

void GLVideoRenderer::initialize() {
    m_shader_program = glCreateProgram();
    GLuint vert = glCreateShader(GL_VERTEX_SHADER);
    GLuint frag = glCreateShader(GL_FRAGMENT_SHADER);

    glShaderSource(vert, 1, &vertex_shader_string, 0);
    glCompileShader(vert);

    glShaderSource(frag, 1, &fragment_shader_string, 0);
    glCompileShader(frag);

    glAttachShader(m_shader_program, vert);
    glAttachShader(m_shader_program, frag);
    glLinkProgram(m_shader_program);
    glDeleteShader(vert);
    glDeleteShader(frag);

    for (int i = 0; i < 3; i++) {
        m_texture_uniform[i] = glGetUniformLocation(m_shader_program, texture_mappings[i]);
    }

    glGenBuffers(1, &m_vbo);
    glBindBuffer(GL_ARRAY_BUFFER, m_vbo);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);

    glGenVertexArrays(1, &m_vao);
    glBindVertexArray(m_vao);

    glUseProgram(m_shader_program);

    int position_location = glGetAttribLocation(m_shader_program, "position");
    glEnableVertexAttribArray(position_location);
    // Use the queried attribute location rather than assuming it is 0.
    glVertexAttribPointer(position_location, 2, GL_FLOAT, GL_FALSE, 0, NULL);

    m_yuvmat_location = glGetUniformLocation(m_shader_program, "yuvmat");
    m_offset_location = glGetUniformLocation(m_shader_program, "offset");
}

void GLVideoRenderer::draw(int width, int height, AVFrame *frame) {
    if (!m_is_initialized) {
        initialize();
        m_is_initialized = true;
    }

    if (m_width != frame->width || m_height != frame->height) {
        m_width = frame->width;
        m_height = frame->height;

        for (int i = 0; i < 3; i++) {
            if (m_texture_id[i]) {
                glDeleteTextures(1, &m_texture_id[i]);
            }
        }

        glGenTextures(3, m_texture_id);

        for (int i = 0; i < 3; i++) {
            glBindTexture(GL_TEXTURE_2D, m_texture_id[i]);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, i > 0 ? m_width / 2 : m_width, i > 0 ? m_height / 2 : m_height, 0, GL_RED, GL_UNSIGNED_BYTE, NULL);
        }
    }

    glClearColor(0, 0, 0, 1);
    glClear(GL_COLOR_BUFFER_BIT);

    glUseProgram(m_shader_program);

    glUniform3fv(m_offset_location, 1, gl_color_offset(frame->color_range == AVCOL_RANGE_JPEG));
    glUniformMatrix3fv(m_yuvmat_location, 1, GL_FALSE, gl_color_matrix(frame->colorspace, frame->color_range == AVCOL_RANGE_JPEG));

    for (int i = 0; i < 3; i++) {
        auto image = frame->data[i];
        glActiveTexture(GL_TEXTURE0 + i);
        glBindTexture(GL_TEXTURE_2D, m_texture_id[i]);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, i > 0 ? m_width / 2 : m_width, i > 0 ? m_height / 2 : m_height, GL_RED, GL_UNSIGNED_BYTE, image);
        glUniform1i(m_texture_uniform[i], i);
        glActiveTexture(GL_TEXTURE0);
    }

    glBindVertexArray(m_vao);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
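
Together with FFmpegVideoDecoder above, the per-frame flow is roughly: submit_decode_unit() stores the newest picture, and the renderer consumes it. A reduced sketch of the glue (hypothetical; MoonlightSession presumably does the equivalent). Note that the glTexSubImage2D upload assumes each plane's linesize equals its width.

// Hypothetical glue; render_latest_frame is an illustrative name.
void render_latest_frame(FFmpegVideoDecoder* decoder, GLVideoRenderer* renderer,
                         int surface_width, int surface_height) {
    if (AVFrame* picture = decoder->frame()) {      // nullptr until the first frame decodes
        renderer->draw(surface_width, surface_height, picture);
    }
}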

21 src/streaming/video/GLVideoRenderer.hpp Normal file
@@ -0,0 +1,21 @@
#include "IVideoRenderer.hpp"
|
||||
#include "glsym.h"
|
||||
#pragma once
|
||||
|
||||
class GLVideoRenderer: public IVideoRenderer {
|
||||
public:
|
||||
GLVideoRenderer() {};
|
||||
~GLVideoRenderer();
|
||||
|
||||
void draw(int width, int height, AVFrame *frame) override;
|
||||
|
||||
private:
|
||||
void initialize();
|
||||
|
||||
bool m_is_initialized = false;
|
||||
GLuint m_texture_id[3] = {0, 0, 0}, m_texture_uniform[3];
|
||||
GLuint m_shader_program;
|
||||
GLuint m_vbo, m_vao;
|
||||
int m_width = 0, m_height = 0;
|
||||
int m_yuvmat_location, m_offset_location;
|
||||
};

12 src/streaming/video/IVideoRenderer.hpp Normal file
@@ -0,0 +1,12 @@
#include <Limelight.h>
#pragma once

extern "C" {
#include <libavcodec/avcodec.h>
}

class IVideoRenderer {
public:
    virtual ~IVideoRenderer() {};
    virtual void draw(int width, int height, AVFrame* frame) = 0;
};
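
IVideoRenderer keeps the renderer contract to a single draw() call, so alternative back ends stay small. A minimal placeholder implementation (illustrative only, not part of this commit):

// Hypothetical no-op renderer, handy while bringing up a new platform target.
#include <cstdio>

class NullVideoRenderer: public IVideoRenderer {
public:
    void draw(int width, int height, AVFrame* frame) override {
        // Report the frame instead of drawing it; pixel data is left untouched.
        printf("frame %dx%d on %dx%d surface\n", frame->width, frame->height, width, height);
    }
};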

@@ -1,113 +1,52 @@
#include "StreamWindow.hpp"
|
||||
#include "LoadingOverlay.hpp"
|
||||
#include "GameStreamClient.hpp"
|
||||
#include "Settings.hpp"
|
||||
#include "InputController.hpp"
|
||||
#include "gl_render.h"
|
||||
#include "video_decoder.h"
|
||||
#include "audio_decoder.h"
|
||||
#include "FFmpegVideoDecoder.hpp"
|
||||
#include "GLVideoRenderer.hpp"
|
||||
#include "RetroAudioRenderer.hpp"
|
||||
#include "nanovg.h"
|
||||
#include "libretro.h"
|
||||
#include <algorithm>
|
||||
#include <memory>
|
||||
|
||||
using namespace nanogui;
|
||||
|
||||
static std::weak_ptr<StreamWindow *> _weak;
|
||||
|
||||
StreamWindow::StreamWindow(Widget *parent, const std::string &address, int app_id): Widget(parent) {
|
||||
_weak = std::make_shared<StreamWindow *>(this);
|
||||
|
||||
m_address = address;
|
||||
m_app_id = app_id;
|
||||
m_connection_status_is_poor = false;
|
||||
m_size = parent->size();
|
||||
m_session = new MoonlightSession(address, app_id);
|
||||
m_session->set_video_decoder(new FFmpegVideoDecoder());
|
||||
m_session->set_video_renderer(new GLVideoRenderer());
|
||||
|
||||
LiInitializeStreamConfiguration(&m_config);
|
||||
|
||||
int h = Settings::settings()->resolution();
|
||||
int w = h * 16 / 9;
|
||||
m_config.width = w;
|
||||
m_config.height = h;
|
||||
m_config.fps = Settings::settings()->fps();
|
||||
m_config.audioConfiguration = AUDIO_CONFIGURATION_STEREO;
|
||||
m_config.packetSize = 1392;
|
||||
m_config.streamingRemotely = STREAM_CFG_LOCAL;
|
||||
m_config.bitrate = Settings::settings()->bitrate();
|
||||
|
||||
switch (Settings::settings()->video_codec()) {
|
||||
case H264:
|
||||
m_config.supportsHevc = 0;
|
||||
break;
|
||||
case H265:
|
||||
m_config.supportsHevc = 1;
|
||||
m_config.hevcBitratePercentageMultiplier = 75;
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
#ifdef __LIBRETRO__
|
||||
m_session->set_audio_renderer(new RetroAudioRenderer());
|
||||
#endif
|
||||
|
||||
m_loader = add<LoadingOverlay>();
|
||||
|
||||
GameStreamClient::client()->start(m_address, m_config, m_app_id, [this](auto result) {
|
||||
if (result.isSuccess()) {
|
||||
m_config = result.value();
|
||||
setup_stream();
|
||||
m_session->start([this](auto result) {
|
||||
if (result) {
|
||||
if (m_loader) {
|
||||
m_loader->dispose();
|
||||
m_loader = NULL;
|
||||
}
|
||||
} else {
|
||||
m_loader->dispose();
|
||||
screen()->add<MessageDialog>(MessageDialog::Type::Information, "Error", result.error());
|
||||
|
||||
auto app = static_cast<Application *>(screen());
|
||||
app->pop_window();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
void StreamWindow::setup_stream() {
|
||||
perform_async([this] {
|
||||
CONNECTION_LISTENER_CALLBACKS connection_listener_callbacks = {
|
||||
.stageStarting = NULL,
|
||||
.stageComplete = NULL,
|
||||
.stageFailed = NULL,
|
||||
.connectionStarted = NULL,
|
||||
.connectionTerminated = [](int errorCode) {
|
||||
if (auto stream = _weak.lock()) {
|
||||
(*stream)->terminate(true);
|
||||
}
|
||||
},
|
||||
.logMessage = NULL,
|
||||
.rumble = NULL,
|
||||
.connectionStatusUpdate = [](int status) {
|
||||
if (auto stream = _weak.lock()) {
|
||||
if (status == CONN_STATUS_POOR) {
|
||||
(*stream)->m_connection_status_is_poor = true;
|
||||
} else {
|
||||
(*stream)->m_connection_status_is_poor = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
auto m_data = GameStreamClient::client()->server_data(m_address);
|
||||
LiStartConnection(&m_data.serverInfo, &m_config, &connection_listener_callbacks, &video_decoder_callbacks, &audio_decoder_callbacks, NULL, 0, NULL, 0);
|
||||
|
||||
async([this] {
|
||||
if (m_loader) {
|
||||
m_loader->dispose();
|
||||
m_loader = NULL;
|
||||
}
|
||||
});
|
||||
});
|
||||
StreamWindow::~StreamWindow() {
|
||||
delete m_session;
|
||||
}
|
||||
|
||||
void StreamWindow::draw(NVGcontext *ctx) {
|
||||
if (!m_session->is_active()) {
|
||||
async([this] { this->terminate(false); });
|
||||
}
|
||||
|
||||
nvgSave(ctx);
|
||||
|
||||
gl_render_setup(m_config.width, m_config.height);
|
||||
|
||||
if (frame != NULL) {
|
||||
gl_render_draw(frame->data, frame->colorspace, frame->color_range);
|
||||
}
|
||||
m_session->draw();
|
||||
|
||||
nvgRestore(ctx);
|
||||
|
||||
|
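
One detail worth noting in the hunk above: the connection listener callbacks are plain C function pointers without a user-data argument, so they reach the active window through the file-scope std::weak_ptr<StreamWindow *>. A reduced sketch of that pattern (illustrative, not part of this commit; the owning shared_ptr is kept alive explicitly so the weak_ptr can later lock):

// Hypothetical sketch of the callback bridge; register/unregister names are illustrative.
#include <memory>

static std::shared_ptr<StreamWindow *> s_owner;   // keeps the control block alive
static std::weak_ptr<StreamWindow *> s_weak;      // what the callbacks consult

void register_stream_window(StreamWindow* window) {
    s_owner = std::make_shared<StreamWindow *>(window);
    s_weak = s_owner;
}

void unregister_stream_window() {
    s_owner.reset();                              // after this, lock() returns nullptr
}

void on_connection_terminated(int /*error_code*/) {
    if (auto stream = s_weak.lock()) {            // only if the window is still registered
        (*stream)->terminate(true);
    }
}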
@@ -145,11 +84,7 @@ void StreamWindow::terminate(bool close_app) {
        m_loader = NULL;
    }

    if (close_app) {
        GameStreamClient::client()->quit(m_address, [](auto _) {});
    }

    LiStopConnection();
    m_session->stop(close_app);

    if (auto app = dynamic_cast<Application *>(screen())) {
        app->pop_window();
@@ -1,13 +1,13 @@
#include "ContentWindow.hpp"
#include "GameStreamClient.hpp"
#include "LoadingOverlay.hpp"
#include "MoonlightSession.hpp"
#pragma once

class StreamWindow: public nanogui::Widget {
public:
    StreamWindow(Widget *parent, const std::string &address, int app_id);

    void setup_stream();
    ~StreamWindow();

    void draw(NVGcontext *ctx) override;

@@ -17,9 +17,7 @@ public:
    void terminate(bool close_app);

private:
    std::string m_address;
    int m_app_id;
    STREAM_CONFIGURATION m_config;
    MoonlightSession* m_session;
    LoadingOverlay* m_loader;
    bool m_connection_status_is_poor;
    bool m_connection_status_is_poor = false;
};