App streaming pipeline, dashboard server status, account enable/disable, game-linked plans

- Add C++ native streaming engine (RTMP client, EGL context, streaming engine, JNI bridge)
- Add pre-built arm64-v8a libs (librtmp, libssl, libcrypto, libz) and headers
- Add Kotlin streaming layer (NativeStreamingEngine, StreamingManager, StreamingStats)
- Add AIDL streaming interface (ILckStreamingService, ILckStreamingCallback, StreamingConfig)
- Add LckStreamingServiceImpl with BIND_STREAMING action support
- Add APP_STREAMING execution mode with auto-start/stop on plan lifecycle
- SDK: add bindStreaming(), submitVideoFrame(), submitAudioFrame() to LckControlClient
- Dashboard: replace linked accounts with server status card, move health polling from nav
- Remove health check dot overlay from Dashboard nav icon
- Accounts: add enable/disable toggle per account (persists locally, excluded from default plans)
- Plans: add gameId field linked to game package ID, resolved from ClientTracker for default plans
- Service: pass executionMode+gameId through createStreamPlan, filter enabled accounts in createDefaultPlan
- Room DB migration 4→5: add isEnabled column to linked_accounts, gameId column to stream_plans
- Add docs (hub vs control comparison)
This commit is contained in:
2026-02-28 20:05:21 +01:00
parent 1480a2944b
commit 097cd24ea9
59 changed files with 13609 additions and 89 deletions

View File

@@ -60,9 +60,20 @@ android {
buildConfigField("String", "DISPLAY_VERSION", "\"${gitDisplayVersion()}\"") buildConfigField("String", "DISPLAY_VERSION", "\"${gitDisplayVersion()}\"")
ndk {
abiFilters += listOf("arm64-v8a")
}
testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner" testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner"
} }
externalNativeBuild {
cmake {
path = file("src/main/cpp/CMakeLists.txt")
version = "3.22.1"
}
}
buildTypes { buildTypes {
debug { debug {
signingConfig = signingConfigs.getByName("release") signingConfig = signingConfigs.getByName("release")

View File

@@ -69,6 +69,9 @@
<intent-filter> <intent-filter>
<action android:name="com.omixlab.lckcontrol.BIND" /> <action android:name="com.omixlab.lckcontrol.BIND" />
</intent-filter> </intent-filter>
<intent-filter>
<action android:name="com.omixlab.lckcontrol.BIND_STREAMING" />
</intent-filter>
</service> </service>
</application> </application>

View File

@@ -0,0 +1,49 @@
# Native streaming engine: RTMP mux + EGL/GLES blit + MediaCodec (NDK).
cmake_minimum_required(VERSION 3.22.1)
project(lck_streaming)
# NDK system libraries: logging, ANativeWindow/AHardwareBuffer, MediaCodec,
# EGL and OpenGL ES 3.
find_library(log-lib log)
find_library(android-lib android)
find_library(mediandk-lib mediandk)
find_library(egl-lib EGL)
find_library(glesv3-lib GLESv3)
find_library(nativewindow-lib nativewindow)
add_library(lck_streaming SHARED
    jni_bridge.cpp
    rtmp_client.cpp
    rtmp_sink.cpp
    egl_context.cpp
    streaming_engine.cpp
)
target_include_directories(lck_streaming PRIVATE
    ${CMAKE_SOURCE_DIR}/third_party/librtmp/include
)
# Import pre-built librtmp from jniLibs
# (plus its OpenSSL dependencies below, so the linker resolves against the
# exact .so files Gradle packages; arm64-v8a only per the app's abiFilters).
add_library(rtmp SHARED IMPORTED)
set_target_properties(rtmp PROPERTIES
    IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/../jniLibs/arm64-v8a/librtmp.so
)
add_library(ssl SHARED IMPORTED)
set_target_properties(ssl PROPERTIES
    IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/../jniLibs/arm64-v8a/libssl.so
)
add_library(crypto SHARED IMPORTED)
set_target_properties(crypto PROPERTIES
    IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/../jniLibs/arm64-v8a/libcrypto.so
)
target_link_libraries(lck_streaming
    ${log-lib}
    ${android-lib}
    ${mediandk-lib}
    ${egl-lib}
    ${glesv3-lib}
    ${nativewindow-lib}
    rtmp
    ssl
    crypto
)

View File

@@ -0,0 +1,219 @@
#include "egl_context.h"

#include <android/log.h>
#include <cerrno>
#include <poll.h>
#include <unistd.h>
#define TAG "LckEglContext"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
#ifndef EGL_NATIVE_BUFFER_ANDROID
#define EGL_NATIVE_BUFFER_ANDROID 0x3140
#endif
#ifndef EGL_SYNC_NATIVE_FENCE_ANDROID
#define EGL_SYNC_NATIVE_FENCE_ANDROID 0x3144
#endif
#ifndef EGL_SYNC_NATIVE_FENCE_FD_ANDROID
#define EGL_SYNC_NATIVE_FENCE_FD_ANDROID 0x3145
#endif
#ifndef EGL_RECORDABLE_ANDROID
#define EGL_RECORDABLE_ANDROID 0x3142
#endif
EglContext::EglContext() {}
// Destructor reclaims any EGL objects still alive (see Release()).
EglContext::~EglContext() {
    Release();
}
// Resolve the EGL/GLES extension entry points used by this class.
// The sync (eglCreateSyncKHR/eglWaitSyncKHR/eglDestroySyncKHR) and
// presentation-time pointers are optional — callers check them before use.
// The HardwareBuffer-import set is mandatory; missing any of those fails Init().
bool EglContext::LoadExtensions() {
    eglCreateSyncKHR = (PFNEGLCREATESYNCKHRPROC)eglGetProcAddress("eglCreateSyncKHR");
    eglWaitSyncKHR = (PFNEGLWAITSYNCKHRPROC)eglGetProcAddress("eglWaitSyncKHR");
    eglDestroySyncKHR = (PFNEGLDESTROYSYNCKHRPROC)eglGetProcAddress("eglDestroySyncKHR");
    eglGetNativeClientBufferANDROID = (PFNEGLGETNATIVECLIENTBUFFERANDROIDPROC)eglGetProcAddress("eglGetNativeClientBufferANDROID");
    eglCreateImageKHR = (PFNEGLCREATEIMAGEKHRPROC)eglGetProcAddress("eglCreateImageKHR");
    eglDestroyImageKHR = (PFNEGLDESTROYIMAGEKHRPROC)eglGetProcAddress("eglDestroyImageKHR");
    glEGLImageTargetTexture2DOES = (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC)eglGetProcAddress("glEGLImageTargetTexture2DOES");
    eglPresentationTimeANDROID = (PFNEGLPRESENTATIONTIMEANDROIDPROC)eglGetProcAddress("eglPresentationTimeANDROID");
    if (!eglGetNativeClientBufferANDROID || !eglCreateImageKHR ||
        !eglDestroyImageKHR || !glEGLImageTargetTexture2DOES) {
        LOGE("Missing required EGL extensions for HardwareBuffer import");
        return false;
    }
    return true;
}
// Bring up the default EGL display and an OpenGL ES 3 context with a
// "recordable" config, suitable for rendering into a MediaCodec input surface.
// Returns false on any EGL failure; partially-created state is reclaimed by
// Release() (invoked from the destructor).
bool EglContext::Init() {
    display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (display == EGL_NO_DISPLAY) {
        LOGE("eglGetDisplay failed");
        return false;
    }
    EGLint major, minor;
    if (!eglInitialize(display, &major, &minor)) {
        LOGE("eglInitialize failed");
        return false;
    }
    LOGI("EGL initialized: %d.%d", major, minor);
    // EGL config: RGBA8, ES3, recordable for MediaCodec
    EGLint configAttribs[] = {
        EGL_RENDERABLE_TYPE, EGL_OPENGL_ES3_BIT,
        EGL_RED_SIZE, 8,
        EGL_GREEN_SIZE, 8,
        EGL_BLUE_SIZE, 8,
        EGL_ALPHA_SIZE, 8,
        EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
        EGL_RECORDABLE_ANDROID, EGL_TRUE,
        EGL_NONE
    };
    EGLint numConfigs;
    if (!eglChooseConfig(display, configAttribs, &config, 1, &numConfigs) || numConfigs == 0) {
        LOGE("eglChooseConfig failed");
        return false;
    }
    EGLint contextAttribs[] = {
        EGL_CONTEXT_CLIENT_VERSION, 3,
        EGL_NONE
    };
    // No share context — this context owns all GL objects created here.
    context = eglCreateContext(display, config, EGL_NO_CONTEXT, contextAttribs);
    if (context == EGL_NO_CONTEXT) {
        LOGE("eglCreateContext failed");
        return false;
    }
    if (!LoadExtensions()) {
        return false;
    }
    LOGI("EGL context created successfully");
    return true;
}
// Wrap the given ANativeWindow (typically the encoder's input surface) in an
// EGL window surface, replacing any surface created earlier. The queried
// width/height are cached for GetWidth()/GetHeight().
bool EglContext::CreateWindowSurface(ANativeWindow* window) {
    if (surface != EGL_NO_SURFACE) {
        // Re-creation path: drop the old surface first.
        eglDestroySurface(display, surface);
    }
    surface = eglCreateWindowSurface(display, config, window, nullptr);
    if (surface == EGL_NO_SURFACE) {
        LOGE("eglCreateWindowSurface failed: 0x%x", eglGetError());
        return false;
    }
    eglQuerySurface(display, surface, EGL_WIDTH, &surfaceWidth);
    eglQuerySurface(display, surface, EGL_HEIGHT, &surfaceHeight);
    LOGI("EGL window surface created: %dx%d", surfaceWidth, surfaceHeight);
    return true;
}
// Import an AHardwareBuffer as a GL_TEXTURE_EXTERNAL_OES texture via an
// EGLImage. Returns the texture id, or 0 on failure. Requires a current
// context (texture is created in the current GL context).
GLuint EglContext::ImportHardwareBuffer(AHardwareBuffer* buffer) {
    if (!eglGetNativeClientBufferANDROID || !eglCreateImageKHR || !glEGLImageTargetTexture2DOES) {
        LOGE("Missing EGL extensions for HardwareBuffer import");
        return 0;
    }
    EGLClientBuffer clientBuffer = eglGetNativeClientBufferANDROID(buffer);
    if (!clientBuffer) {
        LOGE("eglGetNativeClientBufferANDROID failed");
        return 0;
    }
    EGLint imageAttribs[] = {
        EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
        EGL_NONE
    };
    EGLImageKHR image = eglCreateImageKHR(display, EGL_NO_CONTEXT,
                                          EGL_NATIVE_BUFFER_ANDROID,
                                          clientBuffer, imageAttribs);
    if (image == EGL_NO_IMAGE_KHR) {
        LOGE("eglCreateImageKHR failed: 0x%x", eglGetError());
        return 0;
    }
    GLuint textureId;
    glGenTextures(1, &textureId);
    glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId);
    glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, image);
    // The EGLImage handle is destroyed here: per EGL_KHR_image_base, the
    // underlying buffer stays valid while sibling resources (this texture)
    // exist, so only the texture needs explicit cleanup. Callers therefore
    // pass EGL_NO_IMAGE_KHR to ReleaseImportedTexture — TODO confirm at the
    // call sites in streaming_engine.cpp.
    eglDestroyImageKHR(display, image);
    return textureId;
}
// Free a texture produced by ImportHardwareBuffer and, when the caller still
// holds one, the EGLImage it was created from. Both arguments may be "empty"
// (0 / EGL_NO_IMAGE_KHR); each is released independently.
void EglContext::ReleaseImportedTexture(GLuint textureId, EGLImageKHR image) {
    if (textureId != 0) {
        GLuint ids[1] = { textureId };
        glDeleteTextures(1, ids);
    }
    const bool haveImage = (image != EGL_NO_IMAGE_KHR);
    if (haveImage && eglDestroyImageKHR != nullptr) {
        eglDestroyImageKHR(display, image);
    }
}
void EglContext::WaitFence(int fenceFd) {
if (fenceFd < 0) return;
if (eglCreateSyncKHR && eglWaitSyncKHR && eglDestroySyncKHR) {
EGLint attribs[] = {
EGL_SYNC_NATIVE_FENCE_FD_ANDROID, fenceFd,
EGL_NONE
};
EGLSyncKHR sync = eglCreateSyncKHR(display, EGL_SYNC_NATIVE_FENCE_ANDROID, attribs);
if (sync != EGL_NO_SYNC_KHR) {
// GPU-side wait — doesn't block CPU
eglWaitSyncKHR(display, sync, 0);
eglDestroySyncKHR(display, sync);
// eglCreateSyncKHR takes ownership of fenceFd
return;
}
}
// Fallback: CPU-side wait
close(fenceFd);
}
// Tag the next swap with a presentation timestamp (consumed by MediaCodec for
// frame PTS). No-op when the extension or surface is unavailable.
void EglContext::SetPresentationTime(int64_t timestampNs) {
    if (eglPresentationTimeANDROID && surface != EGL_NO_SURFACE) {
        eglPresentationTimeANDROID(display, surface, timestampNs);
    }
}
// Bind the window surface + context to the calling thread.
bool EglContext::MakeCurrent() {
    return eglMakeCurrent(display, surface, surface, context) == EGL_TRUE;
}
// Submit the rendered frame to the window surface's consumer (the encoder).
bool EglContext::SwapBuffers() {
    return eglSwapBuffers(display, surface) == EGL_TRUE;
}
// Tear down surface, context, and display. Safe to call repeatedly; also runs
// from the destructor. Unbinds first so destruction is legal on this thread.
void EglContext::Release() {
    if (display != EGL_NO_DISPLAY) {
        eglMakeCurrent(display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
        if (surface != EGL_NO_SURFACE) {
            eglDestroySurface(display, surface);
            surface = EGL_NO_SURFACE;
        }
        if (context != EGL_NO_CONTEXT) {
            eglDestroyContext(display, context);
            context = EGL_NO_CONTEXT;
        }
        eglTerminate(display);
        display = EGL_NO_DISPLAY;
    }
    LOGI("EGL resources released");
}

View File

@@ -0,0 +1,69 @@
#pragma once
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES3/gl3.h>
#include <GLES2/gl2ext.h>
#include <android/hardware_buffer.h>
/**
 * EGL context for importing HardwareBuffers and blitting to encoder Surface.
 * Handles EGL setup, HardwareBuffer→EGLImage→texture import, and fence sync.
 *
 * Call order: Init() → CreateWindowSurface() → MakeCurrent() before any GL
 * work; Release() (or destruction) tears everything down.
 * NOTE(review): no copy/move suppression — accidental copies would double-free
 * EGL handles; presumably only ever held by pointer/member. Not thread-safe.
 */
class EglContext {
public:
    EglContext();
    ~EglContext();
    /** Initialize EGL with a recordable config. Returns true on success. */
    bool Init();
    /** Create a window surface from an ANativeWindow (encoder input surface). */
    bool CreateWindowSurface(ANativeWindow* window);
    /** Import a HardwareBuffer as a GL texture. Returns texture ID (0 on failure). */
    GLuint ImportHardwareBuffer(AHardwareBuffer* buffer);
    /** Release a previously imported HardwareBuffer texture. */
    void ReleaseImportedTexture(GLuint textureId, EGLImageKHR image);
    /** Wait on a native GPU fence FD. Takes ownership of the FD. */
    void WaitFence(int fenceFd);
    /** Set presentation time on the current surface. */
    void SetPresentationTime(int64_t timestampNs);
    /** Make the window surface current. */
    bool MakeCurrent();
    /** Swap buffers on the window surface. */
    bool SwapBuffers();
    /** Release all EGL resources. */
    void Release();
    EGLDisplay GetDisplay() const { return display; }
    int GetWidth() const { return surfaceWidth; }
    int GetHeight() const { return surfaceHeight; }
private:
    EGLDisplay display = EGL_NO_DISPLAY;
    EGLContext context = EGL_NO_CONTEXT;
    EGLSurface surface = EGL_NO_SURFACE;
    EGLConfig config = nullptr;
    // Dimensions queried from the window surface in CreateWindowSurface().
    int surfaceWidth = 0;
    int surfaceHeight = 0;
    // Extension function pointers
    PFNEGLCREATESYNCKHRPROC eglCreateSyncKHR = nullptr;
    PFNEGLWAITSYNCKHRPROC eglWaitSyncKHR = nullptr;
    PFNEGLDESTROYSYNCKHRPROC eglDestroySyncKHR = nullptr;
    PFNEGLGETNATIVECLIENTBUFFERANDROIDPROC eglGetNativeClientBufferANDROID = nullptr;
    PFNEGLCREATEIMAGEKHRPROC eglCreateImageKHR = nullptr;
    PFNEGLDESTROYIMAGEKHRPROC eglDestroyImageKHR = nullptr;
    PFNGLEGLIMAGETARGETTEXTURE2DOESPROC glEGLImageTargetTexture2DOES = nullptr;
    PFNEGLPRESENTATIONTIMEANDROIDPROC eglPresentationTimeANDROID = nullptr;
    bool LoadExtensions();
};

View File

@@ -0,0 +1,162 @@
#include "streaming_engine.h"
#include <jni.h>
#include <android/hardware_buffer_jni.h>
#include <android/log.h>
#define TAG "LckJniBridge"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
// Cached VM pointer so engine callback threads can attach and call into Java.
static JavaVM* gJavaVM = nullptr;
// Cache for callback method IDs
// NOTE(review): these are process-wide statics resolved in nativeCreate —
// shared by all engine instances (see note there).
static jmethodID gOnStatsMethod = nullptr;
static jmethodID gOnErrorMethod = nullptr;
static jmethodID gOnBufferReleasedMethod = nullptr;
// Standard JNI entry point: stash the JavaVM for later AttachCurrentThread.
JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved) {
    gJavaVM = vm;
    return JNI_VERSION_1_6;
}
extern "C" {
// Create and configure a native StreamingEngine, wire its stats/error/
// buffer-released callbacks back to the Kotlin NativeStreamingEngine, and
// return the engine pointer as a jlong handle.
//
// NOTE(review): the NewGlobalRef created here is captured by the three
// lambdas but never deleted — nativeDestroy has no access to it, so each
// engine leaks one global ref (and the jobject it pins). Consider storing the
// ref on the engine so nativeDestroy can release it.
// NOTE(review): method IDs are cached in process-wide statics; with more than
// one engine alive the second create overwrites them. Harmless while all
// engines use the same Kotlin class — verify that assumption holds.
// NOTE(review): callback threads AttachCurrentThread but never detach;
// acceptable only if the engine reuses a small set of long-lived threads.
JNIEXPORT jlong JNICALL
Java_com_omixlab_lckcontrol_streaming_NativeStreamingEngine_nativeCreate(
        JNIEnv* env, jobject thiz,
        jint width, jint height,
        jint videoBitrate, jint audioBitrate,
        jint sampleRate, jint channels,
        jint keyframeInterval) {
    auto* engine = new StreamingEngine();
    engine->Configure(width, height, videoBitrate, audioBitrate,
                      sampleRate, channels, keyframeInterval);
    // Set up callbacks that call back into Kotlin
    jobject globalRef = env->NewGlobalRef(thiz);
    // Cache method IDs
    jclass cls = env->GetObjectClass(thiz);
    gOnStatsMethod = env->GetMethodID(cls, "onNativeStats", "(JJII)V");
    gOnErrorMethod = env->GetMethodID(cls, "onNativeError", "(ILjava/lang/String;)V");
    gOnBufferReleasedMethod = env->GetMethodID(cls, "onNativeBufferReleased", "(I)V");
    engine->SetStatsCallback([globalRef](const StreamingStats& stats) {
        JNIEnv* env;
        // Callbacks may fire on native threads: attach on demand.
        if (gJavaVM->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
            if (gJavaVM->AttachCurrentThread(&env, nullptr) != JNI_OK) return;
        }
        if (gOnStatsMethod) {
            env->CallVoidMethod(globalRef, gOnStatsMethod,
                                (jlong)stats.videoBitrate, (jlong)stats.audioBitrate,
                                (jint)stats.fps, (jint)stats.droppedFrames);
        }
    });
    engine->SetErrorCallback([globalRef](int code, const std::string& message) {
        JNIEnv* env;
        if (gJavaVM->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
            if (gJavaVM->AttachCurrentThread(&env, nullptr) != JNI_OK) return;
        }
        if (gOnErrorMethod) {
            jstring msg = env->NewStringUTF(message.c_str());
            env->CallVoidMethod(globalRef, gOnErrorMethod, (jint)code, msg);
            env->DeleteLocalRef(msg);
        }
    });
    engine->SetBufferReleasedCallback([globalRef](int bufferIndex) {
        JNIEnv* env;
        if (gJavaVM->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
            if (gJavaVM->AttachCurrentThread(&env, nullptr) != JNI_OK) return;
        }
        if (gOnBufferReleasedMethod) {
            env->CallVoidMethod(globalRef, gOnBufferReleasedMethod, (jint)bufferIndex);
        }
    });
    LOGI("Native engine created: %dx%d", width, height);
    return reinterpret_cast<jlong>(engine);
}
// Register an RTMP destination URL on the engine.
// Returns the destination index, or -1 when the engine handle or URL is
// invalid. Fix over original: guard a null jstring and a null return from
// GetStringUTFChars (which happens on OOM, with a Java exception pending) —
// previously either would crash in native code.
JNIEXPORT jint JNICALL
Java_com_omixlab_lckcontrol_streaming_NativeStreamingEngine_nativeAddDestination(
        JNIEnv* env, jobject thiz, jlong ptr, jstring rtmpUrl) {
    auto* engine = reinterpret_cast<StreamingEngine*>(ptr);
    if (!engine || !rtmpUrl) return -1;
    const char* url = env->GetStringUTFChars(rtmpUrl, nullptr);
    if (!url) return -1;  // OOM — Java exception already pending
    int index = engine->AddDestination(url);
    env->ReleaseStringUTFChars(rtmpUrl, url);
    return index;
}
// Start the engine (encoders + RTMP connections). Returns JNI_TRUE on success.
JNIEXPORT jboolean JNICALL
Java_com_omixlab_lckcontrol_streaming_NativeStreamingEngine_nativeStart(
        JNIEnv* env, jobject thiz, jlong ptr) {
    auto* engine = reinterpret_cast<StreamingEngine*>(ptr);
    if (!engine) return JNI_FALSE;
    return engine->Start() ? JNI_TRUE : JNI_FALSE;
}
// Hand a Java HardwareBuffer to the engine for encoding.
// fenceFd is a native fence guarding the buffer contents; ownership semantics
// follow EglContext::WaitFence (the engine takes the fd).
JNIEXPORT void JNICALL
Java_com_omixlab_lckcontrol_streaming_NativeStreamingEngine_nativeSubmitVideoFrame(
        JNIEnv* env, jobject thiz, jlong ptr,
        jobject hardwareBuffer, jlong timestampNs, jint fenceFd) {
    auto* engine = reinterpret_cast<StreamingEngine*>(ptr);
    if (!engine) return;
    // Borrow the AHardwareBuffer backing the Java object (no ref acquired here).
    AHardwareBuffer* buffer = AHardwareBuffer_fromHardwareBuffer(env, hardwareBuffer);
    if (!buffer) {
        LOGE("Failed to get AHardwareBuffer from Java HardwareBuffer");
        return;
    }
    engine->SubmitVideoFrame(buffer, timestampNs, fenceFd);
}
// Hand a PCM audio buffer to the engine.
// Fix over original: guard a null pcmData array (would crash in
// GetArrayLength) and a null return from GetByteArrayElements (OOM, Java
// exception pending). Frames are silently dropped in those cases.
JNIEXPORT void JNICALL
Java_com_omixlab_lckcontrol_streaming_NativeStreamingEngine_nativeSubmitAudioFrame(
        JNIEnv* env, jobject thiz, jlong ptr,
        jbyteArray pcmData, jlong timestampNs) {
    auto* engine = reinterpret_cast<StreamingEngine*>(ptr);
    if (!engine || !pcmData) return;
    jsize len = env->GetArrayLength(pcmData);
    jbyte* data = env->GetByteArrayElements(pcmData, nullptr);
    if (!data) return;  // OOM — Java exception already pending
    engine->SubmitAudioFrame(reinterpret_cast<const uint8_t*>(data), len, timestampNs);
    // JNI_ABORT: data was only read; no copy-back needed.
    env->ReleaseByteArrayElements(pcmData, data, JNI_ABORT);
}
// Stop streaming; the engine object stays alive and can be restarted.
JNIEXPORT void JNICALL
Java_com_omixlab_lckcontrol_streaming_NativeStreamingEngine_nativeStop(
        JNIEnv* env, jobject thiz, jlong ptr) {
    auto* engine = reinterpret_cast<StreamingEngine*>(ptr);
    if (!engine) return;
    engine->Stop();
}
// Stop and delete the engine. The jlong handle is invalid afterwards.
// NOTE(review): the JNI global ref created in nativeCreate is not released
// here (it is not reachable from this function) — one global ref leaks per
// engine lifetime. See note on nativeCreate.
JNIEXPORT void JNICALL
Java_com_omixlab_lckcontrol_streaming_NativeStreamingEngine_nativeDestroy(
        JNIEnv* env, jobject thiz, jlong ptr) {
    auto* engine = reinterpret_cast<StreamingEngine*>(ptr);
    if (engine) {
        engine->Stop();
        delete engine;
        LOGI("Native engine destroyed");
    }
}
// Query whether the engine is currently streaming.
JNIEXPORT jboolean JNICALL
Java_com_omixlab_lckcontrol_streaming_NativeStreamingEngine_nativeIsRunning(
        JNIEnv* env, jobject thiz, jlong ptr) {
    auto* engine = reinterpret_cast<StreamingEngine*>(ptr);
    if (!engine) return JNI_FALSE;
    return engine->IsRunning() ? JNI_TRUE : JNI_FALSE;
}

View File

@@ -0,0 +1,177 @@
#include "rtmp_client.h"
#include <android/log.h>
#include <cstring>
extern "C" {
#include <librtmp/rtmp.h>
#include <librtmp/log.h>
}
#define TAG "LckRtmpClient"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
RtmpClient::RtmpClient() {}
// Destructor closes any live connection.
RtmpClient::~RtmpClient() {
    Disconnect();
}
// Open an RTMP publishing session to the given URL (reconnecting if one is
// already open). librtmp call order matters: Alloc → Init → SetupURL →
// EnableWrite (before Connect, to publish) → Connect → ConnectStream.
// On any failure all partially-created state is freed and false is returned.
bool RtmpClient::Connect(const std::string& rtmpUrl) {
    if (connected) {
        LOGW("Already connected, disconnecting first");
        Disconnect();
    }
    RTMP_LogSetLevel(RTMP_LOGWARNING);
    rtmpContext = RTMP_Alloc();
    if (!rtmpContext) {
        LOGE("RTMP_Alloc failed");
        return false;
    }
    RTMP_Init(rtmpContext);
    // RTMP_SetupURL needs a mutable char*
    std::vector<char> urlBuffer(rtmpUrl.begin(), rtmpUrl.end());
    urlBuffer.push_back('\0');
    if (!RTMP_SetupURL(rtmpContext, urlBuffer.data())) {
        LOGE("RTMP_SetupURL failed");
        RTMP_Free(rtmpContext);
        rtmpContext = nullptr;
        return false;
    }
    // Must be set before Connect so the session is negotiated for publishing.
    RTMP_EnableWrite(rtmpContext);
    if (!RTMP_Connect(rtmpContext, nullptr)) {
        LOGE("RTMP_Connect failed");
        RTMP_Free(rtmpContext);
        rtmpContext = nullptr;
        return false;
    }
    if (!RTMP_ConnectStream(rtmpContext, 0)) {
        LOGE("RTMP_ConnectStream failed");
        RTMP_Close(rtmpContext);
        RTMP_Free(rtmpContext);
        rtmpContext = nullptr;
        return false;
    }
    connected = true;
    LOGI("RTMP connected");
    return true;
}
// Close and free the librtmp context. Safe to call when not connected.
void RtmpClient::Disconnect() {
    if (rtmpContext) {
        RTMP_Close(rtmpContext);
        RTMP_Free(rtmpContext);
        rtmpContext = nullptr;
        LOGI("RTMP disconnected");
    }
}
// True only while our flag, the context, and librtmp's own socket state all
// agree the session is alive.
bool RtmpClient::IsConnected() const {
    return connected && rtmpContext && RTMP_IsConnected(rtmpContext);
}
// Wrap an FLV tag body in an RTMPPacket and send it. `data` is the complete
// FLV tag body (already framed by the callers below). Returns false and marks
// the client disconnected on send failure so the owner can reconnect.
bool RtmpClient::SendRtmpPacket(uint8_t packetType, uint32_t timestampMs, const uint8_t* data, uint32_t size) {
    if (!IsConnected())
        return false;
    RTMPPacket pkt;
    RTMPPacket_Alloc(&pkt, size);
    pkt.m_packetType = packetType;
    // Separate chunk-stream channels for video (6) and audio (7).
    pkt.m_nChannel = (packetType == RTMP_PACKET_TYPE_VIDEO) ? 0x06 : 0x07;
    // NOTE(review): LARGE (type-0) header with absolute timestamps on every
    // packet — correct but forgoes delta-timestamp header compression.
    pkt.m_headerType = RTMP_PACKET_SIZE_LARGE;
    pkt.m_nTimeStamp = timestampMs;
    pkt.m_hasAbsTimestamp = 1;
    pkt.m_nInfoField2 = rtmpContext->m_stream_id;
    pkt.m_nBodySize = size;
    memcpy(pkt.m_body, data, size);
    // queue=0: send synchronously on this (encoder) thread.
    int ret = RTMP_SendPacket(rtmpContext, &pkt, 0);
    RTMPPacket_Free(&pkt);
    if (!ret) {
        LOGW("RTMP_SendPacket failed (type=%d, size=%u)", packetType, size);
        connected = false;
    }
    return ret != 0;
}
bool RtmpClient::SendAvcSequenceHeader(const uint8_t* extraData, uint32_t extraDataSize) {
// FLV video tag: keyframe(1) + AVC(7) = 0x17, AVC sequence header = 0x00, composition time = 0
uint32_t bodySize = 5 + extraDataSize;
std::vector<uint8_t> body(bodySize);
body[0] = 0x17; // keyframe + AVC
body[1] = 0x00; // AVC sequence header
body[2] = 0x00; // composition time
body[3] = 0x00;
body[4] = 0x00;
memcpy(body.data() + 5, extraData, extraDataSize);
return SendRtmpPacket(RTMP_PACKET_TYPE_VIDEO, 0, body.data(), bodySize);
}
// Build a 2-byte MPEG-4 AudioSpecificConfig:
//   audioObjectType(5) | samplingFrequencyIndex(4) | channelConfiguration(4) | pad(3)
// Object type is fixed to 2 (AAC-LC); unknown sample rates fall back to the
// 44100 Hz index; channel config is clamped to the legal 1..7 range.
void RtmpClient::BuildAudioSpecificConfig(uint8_t outConfig[2], uint32_t sampleRate, uint32_t numChannels) {
    static const uint32_t kRates[] = {
        96000, 88200, 64000, 48000, 44100, 32000,
        24000, 22050, 16000, 12000, 11025, 8000, 7350
    };
    const uint8_t kNumRates = sizeof(kRates) / sizeof(kRates[0]);
    uint8_t freqIndex = 4;  // default: 44100 Hz
    for (uint8_t idx = 0; idx < kNumRates; ++idx) {
        if (kRates[idx] == sampleRate) {
            freqIndex = idx;
            break;
        }
    }
    uint32_t clamped = numChannels;
    if (clamped < 1) clamped = 1;
    if (clamped > 7) clamped = 7;
    const uint8_t channelConfig = static_cast<uint8_t>(clamped);
    const uint8_t kAacLc = 2;  // audioObjectType for AAC-LC
    outConfig[0] = static_cast<uint8_t>((kAacLc << 3) | (freqIndex >> 1));
    outConfig[1] = static_cast<uint8_t>(((freqIndex & 1u) << 7) | (channelConfig << 3));
}
// Publish the AAC sequence header at ts=0. FLV AUDIODATA framing: for AAC the
// first byte is always 0xAF (format=10/AAC, rate=3, size=1, type=1 — those
// bits are placeholders; the real parameters live in the ASC that follows),
// then AACPacketType=0 and the 2-byte AudioSpecificConfig.
bool RtmpClient::SendAacSequenceHeader(uint32_t sampleRate, uint32_t numChannels) {
    uint8_t tag[4] = { 0xAF, 0x00, 0x00, 0x00 };
    BuildAudioSpecificConfig(&tag[2], sampleRate, numChannels);
    return SendRtmpPacket(RTMP_PACKET_TYPE_AUDIO, 0, tag, sizeof(tag));
}
bool RtmpClient::SendVideoPacket(const uint8_t* data, uint32_t size, uint32_t timestampMs, bool isKeyframe) {
uint32_t bodySize = 5 + size;
std::vector<uint8_t> body(bodySize);
body[0] = isKeyframe ? 0x17 : 0x27;
body[1] = 0x01; // AVC NALU
body[2] = 0x00; // composition time offset
body[3] = 0x00;
body[4] = 0x00;
memcpy(body.data() + 5, data, size);
return SendRtmpPacket(RTMP_PACKET_TYPE_VIDEO, timestampMs, body.data(), bodySize);
}
bool RtmpClient::SendAudioPacket(const uint8_t* data, uint32_t size, uint32_t timestampMs) {
uint32_t bodySize = 2 + size;
std::vector<uint8_t> body(bodySize);
body[0] = 0xAF;
body[1] = 0x01; // AAC raw
memcpy(body.data() + 2, data, size);
return SendRtmpPacket(RTMP_PACKET_TYPE_AUDIO, timestampMs, body.data(), bodySize);
}

View File

@@ -0,0 +1,34 @@
#pragma once
#include <cstdint>
#include <string>
#include <vector>
// Forward declaration — librtmp headers are only pulled into the .cpp.
struct RTMP;
/**
 * Low-level librtmp wrapper for RTMP streaming.
 * Ported from FLCKRtmpClient (UE5 LCKStreaming plugin).
 * All methods should be called from the same thread (encoder thread).
 */
class RtmpClient {
public:
    RtmpClient();
    ~RtmpClient();
    bool Connect(const std::string& rtmpUrl);
    void Disconnect();
    bool IsConnected() const;
    /** Send the AVCDecoderConfigurationRecord (extraData) at timestamp 0. */
    bool SendAvcSequenceHeader(const uint8_t* extraData, uint32_t extraDataSize);
    /** Send the AAC AudioSpecificConfig at timestamp 0. */
    bool SendAacSequenceHeader(uint32_t sampleRate, uint32_t numChannels);
    /** Send one AVCC-framed video frame. */
    bool SendVideoPacket(const uint8_t* data, uint32_t size, uint32_t timestampMs, bool isKeyframe);
    /** Send one raw AAC frame. */
    bool SendAudioPacket(const uint8_t* data, uint32_t size, uint32_t timestampMs);
private:
    bool SendRtmpPacket(uint8_t packetType, uint32_t timestampMs, const uint8_t* data, uint32_t size);
    static void BuildAudioSpecificConfig(uint8_t outConfig[2], uint32_t sampleRate, uint32_t numChannels);
    RTMP* rtmpContext = nullptr;
    // Tracks our view of the session; cleared when a send fails.
    bool connected = false;
};

View File

@@ -0,0 +1,278 @@
#include "rtmp_sink.h"
#include <android/log.h>
#include <cstring>
#include <algorithm>
#define TAG "LckRtmpSink"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
RtmpSink::RtmpSink() {}
// Destructor closes the sink if the owner forgot to.
RtmpSink::~RtmpSink() {
    if (isOpen) {
        Close();
    }
}
// Set the destination URL. Must be called before Open().
void RtmpSink::SetRtmpUrl(const std::string& url) {
    rtmpUrl = url;
}
// Connect to the RTMP endpoint and reset per-session header state.
// width/height/framerate are currently only logged; sampleRate/numChannels
// are stored for the (possibly deferred) AAC sequence header.
bool RtmpSink::Open(uint32_t width, uint32_t height, uint32_t framerate,
                    uint32_t sampleRate, uint32_t numChannels) {
    if (rtmpUrl.empty()) {
        LOGE("RTMP URL not set");
        return false;
    }
    storedSampleRate = sampleRate;
    storedNumChannels = numChannels;
    if (!rtmpClient.Connect(rtmpUrl)) {
        LOGE("Failed to connect RTMP");
        return false;
    }
    isOpen = true;
    videoHeaderSent = false;
    audioHeaderSent = false;
    LOGI("RTMP sink opened: %dx%d@%dfps, %dHz %dch",
         width, height, framerate, sampleRate, numChannels);
    return true;
}
// Called when the encoder reports its codec config (csd). Accepts either an
// already-AVCC AVCDecoderConfigurationRecord or Annex-B SPS/PPS, and sends
// the AVC sequence header accordingly.
void RtmpSink::OnVideoFormatReady(const uint8_t* extraData, uint32_t extraDataSize) {
    if (!isOpen) return;
    if (extraData && extraDataSize > 0) {
        // Check if already AVCC format (starts with version byte 0x01)
        if (extraDataSize > 4 && extraData[0] == 0x01) {
            if (rtmpClient.SendAvcSequenceHeader(extraData, extraDataSize)) {
                videoHeaderSent = true;
                LOGI("Sent AVC sequence header (AVCC, %u bytes)", extraDataSize);
            }
        } else {
            // Annex-B format - extract and convert
            TryExtractAndSendSequenceHeader(extraData, extraDataSize);
        }
    }
}
// Called when the audio encoder's real output format is known; overrides the
// values stored at Open() and sends the AAC sequence header immediately.
void RtmpSink::OnAudioFormatReady(uint32_t sampleRate, uint32_t numChannels) {
    if (!isOpen) return;
    storedSampleRate = sampleRate;
    storedNumChannels = numChannels;
    if (rtmpClient.SendAacSequenceHeader(sampleRate, numChannels)) {
        audioHeaderSent = true;
        LOGI("Sent AAC sequence header (%dHz, %dch)", sampleRate, numChannels);
    }
}
// Forward one encoded video frame (Annex-B from MediaCodec) to RTMP.
// Ordering invariants enforced here: the AVC sequence header must go out
// before any frame (extracted from the first keyframe if the encoder never
// delivered csd), and the AAC header is sent lazily if OnAudioFormatReady
// never fired. Frames arriving before headers are ready are dropped.
void RtmpSink::SendVideoPacket(const uint8_t* data, uint32_t size,
                               int64_t timestampMs, bool isKeyframe) {
    if (!isOpen || !rtmpClient.IsConnected()) return;
    // If we haven't sent the video sequence header yet and this is a keyframe,
    // try to extract SPS/PPS from it
    if (!videoHeaderSent && isKeyframe) {
        TryExtractAndSendSequenceHeader(data, size);
    }
    if (!videoHeaderSent) return;
    // Send AAC sequence header on first video packet if not sent yet
    if (!audioHeaderSent) {
        if (rtmpClient.SendAacSequenceHeader(storedSampleRate, storedNumChannels)) {
            audioHeaderSent = true;
            LOGI("Sent AAC sequence header (deferred, %dHz, %dch)",
                 storedSampleRate, storedNumChannels);
        }
    }
    // RTMP timestamps are unsigned 32-bit ms; clamp negatives to 0.
    uint32_t ts = static_cast<uint32_t>(std::max<int64_t>(timestampMs, 0));
    // Convert Annex-B to AVCC for RTMP/FLV
    std::vector<uint8_t> avccData = ConvertAnnexBToAvcc(data, size);
    if (!avccData.empty()) {
        rtmpClient.SendVideoPacket(avccData.data(), static_cast<uint32_t>(avccData.size()),
                                   ts, isKeyframe);
    }
}
// Forward one raw AAC frame. Audio is held back until both sequence headers
// have been published so players receive headers first.
void RtmpSink::SendAudioPacket(const uint8_t* data, uint32_t size, int64_t timestampMs) {
    if (!isOpen || !rtmpClient.IsConnected()) return;
    if (!audioHeaderSent || !videoHeaderSent) return;
    uint32_t ts = static_cast<uint32_t>(std::max<int64_t>(timestampMs, 0));
    rtmpClient.SendAudioPacket(data, size, ts);
}
// Disconnect and reset per-session header state. Idempotent.
void RtmpSink::Close() {
    if (!isOpen) {
        return;
    }
    rtmpClient.Disconnect();
    videoHeaderSent = false;
    audioHeaderSent = false;
    isOpen = false;
    LOGI("RTMP sink closed");
}
// Whether Open() succeeded and Close() has not yet been called.
bool RtmpSink::IsOpen() const {
    return isOpen;
}
// Scan an Annex-B bitstream for the first SPS (NALU type 7) and PPS (type 8),
// build an AVCDecoderConfigurationRecord from them, and send it as the AVC
// sequence header. Sets videoHeaderSent on success. Returns false when either
// parameter set is absent or the send fails.
bool RtmpSink::TryExtractAndSendSequenceHeader(const uint8_t* data, uint32_t size) {
    // Parse Annex-B bitstream to find SPS and PPS NALUs
    const uint8_t* sps = nullptr;
    uint32_t spsSize = 0;
    const uint8_t* pps = nullptr;
    uint32_t ppsSize = 0;
    const uint8_t* end = data + size;
    // Returns a pointer to the first byte AFTER a 3- or 4-byte start code
    // (00 00 01 / 00 00 00 01), or nullptr when none remains.
    auto findStartCode = [](const uint8_t* p, const uint8_t* end) -> const uint8_t* {
        while (p + 3 <= end) {
            if (p[0] == 0 && p[1] == 0) {
                if (p[2] == 1) return p + 3;
                if (p + 3 < end && p[2] == 0 && p[3] == 1) return p + 4;
            }
            p++;
        }
        return nullptr;
    };
    const uint8_t* pos = findStartCode(data, end);
    while (pos && pos < end) {
        uint8_t currentNaluType = pos[0] & 0x1F;
        const uint8_t* currentNaluStart = pos;
        const uint8_t* nextStart = findStartCode(pos, end);
        const uint8_t* naluEnd;
        if (nextStart) {
            // nextStart is past the start code; step back over a 3-byte code,
            // then one more byte if the code was 4 bytes (extra leading zero).
            naluEnd = nextStart - 3;
            if (naluEnd > data && *(naluEnd - 1) == 0) naluEnd--;
        } else {
            naluEnd = end;
        }
        uint32_t naluSize = static_cast<uint32_t>(naluEnd - currentNaluStart);
        if (currentNaluType == 7 && !sps) { // SPS
            sps = currentNaluStart;
            spsSize = naluSize;
        } else if (currentNaluType == 8 && !pps) { // PPS
            pps = currentNaluStart;
            ppsSize = naluSize;
        }
        if (sps && pps) break;
        pos = nextStart;
    }
    if (sps && spsSize > 0 && pps && ppsSize > 0) {
        std::vector<uint8_t> avcc = BuildAvccFromAnnexB(sps, spsSize, pps, ppsSize);
        if (rtmpClient.SendAvcSequenceHeader(avcc.data(), static_cast<uint32_t>(avcc.size()))) {
            videoHeaderSent = true;
            LOGI("Sent AVC sequence header (extracted SPS=%u PPS=%u)", spsSize, ppsSize);
            return true;
        } else {
            LOGE("SendAvcSequenceHeader failed (SPS=%u PPS=%u)", spsSize, ppsSize);
        }
    }
    return false;
}
// Re-frame an Annex-B access unit as AVCC (each NALU prefixed by a 4-byte
// big-endian length), dropping SPS/PPS/AUD NALUs which RTMP carries in the
// sequence header instead. Input with no start codes is passed through
// unchanged (assumed to already be length-prefixed).
std::vector<uint8_t> RtmpSink::ConvertAnnexBToAvcc(const uint8_t* data, uint32_t size) {
    std::vector<uint8_t> result;
    result.reserve(size);
    // Returns a pointer to the START of the next start code and reports its
    // length (3 or 4) via startCodeLen; 4-byte codes are matched first so a
    // 00 00 00 01 sequence is not mistaken for a 3-byte code.
    auto findStartCode = [](const uint8_t* p, const uint8_t* end, int& startCodeLen) -> const uint8_t* {
        while (p + 3 <= end) {
            if (p[0] == 0 && p[1] == 0) {
                if (p + 3 < end && p[2] == 0 && p[3] == 1) {
                    startCodeLen = 4;
                    return p;
                }
                if (p[2] == 1) {
                    startCodeLen = 3;
                    return p;
                }
            }
            p++;
        }
        return nullptr;
    };
    const uint8_t* pos = data;
    const uint8_t* end = data + size;
    int startCodeLen = 0;
    const uint8_t* startCode = findStartCode(pos, end, startCodeLen);
    if (!startCode) {
        // No start codes found - pass through
        result.insert(result.end(), data, data + size);
        return result;
    }
    while (startCode) {
        const uint8_t* naluStart = startCode + startCodeLen;
        if (naluStart >= end) break;
        int nextStartCodeLen = 0;
        const uint8_t* nextStartCode = findStartCode(naluStart, end, nextStartCodeLen);
        // NALU spans from after this start code to the next start code (or end).
        uint32_t naluSize = nextStartCode
            ? static_cast<uint32_t>(nextStartCode - naluStart)
            : static_cast<uint32_t>(end - naluStart);
        if (naluSize > 0) {
            uint8_t naluType = naluStart[0] & 0x1F;
            // Skip SPS (7), PPS (8), AUD (9)
            if (naluType != 7 && naluType != 8 && naluType != 9) {
                // 4-byte big-endian length prefix, then the NALU payload.
                result.push_back(static_cast<uint8_t>(naluSize >> 24));
                result.push_back(static_cast<uint8_t>(naluSize >> 16));
                result.push_back(static_cast<uint8_t>(naluSize >> 8));
                result.push_back(static_cast<uint8_t>(naluSize & 0xFF));
                result.insert(result.end(), naluStart, naluStart + naluSize);
            }
        }
        startCode = nextStartCode;
        startCodeLen = nextStartCodeLen;
    }
    return result;
}
// Assemble an AVCDecoderConfigurationRecord (ISO/IEC 14496-15) from one raw
// SPS and one raw PPS NALU (NAL header byte included, no start codes).
// Profile/compat/level are mirrored from SPS bytes 1-3, with Baseline/3.0
// defaults when the SPS is implausibly short.
std::vector<uint8_t> RtmpSink::BuildAvccFromAnnexB(const uint8_t* sps, uint32_t spsSize,
                                                   const uint8_t* pps, uint32_t ppsSize) {
    std::vector<uint8_t> cfg;
    cfg.reserve(11 + spsSize + ppsSize);
    // Big-endian 16-bit length field helper.
    auto appendU16 = [&cfg](uint32_t v) {
        cfg.push_back(static_cast<uint8_t>(v >> 8));
        cfg.push_back(static_cast<uint8_t>(v & 0xFF));
    };
    cfg.push_back(0x01);                         // configurationVersion
    cfg.push_back(spsSize > 1 ? sps[1] : 0x42);  // AVCProfileIndication
    cfg.push_back(spsSize > 2 ? sps[2] : 0x00);  // profile_compatibility
    cfg.push_back(spsSize > 3 ? sps[3] : 0x1E);  // AVCLevelIndication
    cfg.push_back(0xFF);                         // reserved + lengthSizeMinusOne = 3
    cfg.push_back(0xE1);                         // reserved + numOfSequenceParameterSets = 1
    appendU16(spsSize);
    cfg.insert(cfg.end(), sps, sps + spsSize);
    cfg.push_back(0x01);                         // numOfPictureParameterSets = 1
    appendU16(ppsSize);
    cfg.insert(cfg.end(), pps, pps + ppsSize);
    return cfg;
}

View File

@@ -0,0 +1,43 @@
#pragma once
#include "rtmp_client.h"
#include <cstdint>
#include <string>
#include <vector>
/**
 * RTMP sink that bridges encoded packets to an RTMP endpoint.
 * Ported from FLCKRtmpSink (UE5 LCKStreaming plugin).
 * Handles Annex-B to AVCC conversion, sequence headers, and FLV framing.
 *
 * Lifecycle: SetRtmpUrl() → Open() → OnVideoFormatReady()/OnAudioFormatReady()
 * (optional — headers are also derived lazily from keyframes) → Send*Packet()
 * → Close(). Same single-thread expectation as RtmpClient.
 */
class RtmpSink {
public:
    RtmpSink();
    ~RtmpSink();
    void SetRtmpUrl(const std::string& url);
    bool Open(uint32_t width, uint32_t height, uint32_t framerate,
              uint32_t sampleRate, uint32_t numChannels);
    void OnVideoFormatReady(const uint8_t* extraData, uint32_t extraDataSize);
    void OnAudioFormatReady(uint32_t sampleRate, uint32_t numChannels);
    void SendVideoPacket(const uint8_t* data, uint32_t size,
                         int64_t timestampMs, bool isKeyframe);
    void SendAudioPacket(const uint8_t* data, uint32_t size, int64_t timestampMs);
    void Close();
    bool IsOpen() const;
private:
    bool TryExtractAndSendSequenceHeader(const uint8_t* data, uint32_t size);
    static std::vector<uint8_t> ConvertAnnexBToAvcc(const uint8_t* data, uint32_t size);
    static std::vector<uint8_t> BuildAvccFromAnnexB(const uint8_t* sps, uint32_t spsSize,
                                                    const uint8_t* pps, uint32_t ppsSize);
    RtmpClient rtmpClient;
    std::string rtmpUrl;
    bool isOpen = false;
    // Sequence-header bookkeeping: gates frame sending until headers are out.
    bool videoHeaderSent = false;
    bool audioHeaderSent = false;
    // Audio parameters remembered for the deferred AAC sequence header.
    uint32_t storedSampleRate = 48000;
    uint32_t storedNumChannels = 2;
};

View File

@@ -0,0 +1,587 @@
#include "streaming_engine.h"
#include <android/log.h>
#include <GLES3/gl3.h>
#include <GLES2/gl2ext.h>
#include <unistd.h>
#include <cstring>
#include <algorithm>
#define TAG "LckStreamingEngine"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
// Shader source for blitting OES texture to framebuffer.
// The pair draws a full-screen quad sampling an external (OES) texture —
// i.e. the imported AHardwareBuffer — into the bound draw surface.
static const char* BLIT_VERTEX_SHADER = R"(#version 300 es
layout(location = 0) in vec2 aPos;
layout(location = 1) in vec2 aTexCoord;
out vec2 vTexCoord;
void main() {
gl_Position = vec4(aPos, 0.0, 1.0);
vTexCoord = aTexCoord;
}
)";
// Fragment stage needs GL_OES_EGL_image_external_essl3 to sample the
// imported buffer through samplerExternalOES under ESSL 3.00.
static const char* BLIT_FRAGMENT_SHADER = R"(#version 300 es
#extension GL_OES_EGL_image_external_essl3 : require
precision mediump float;
in vec2 vTexCoord;
out vec4 fragColor;
uniform samplerExternalOES uTexture;
void main() {
fragColor = texture(uTexture, vTexCoord);
}
)";
/**
 * Compiles a single GL shader stage.
 *
 * @param type   GL_VERTEX_SHADER or GL_FRAGMENT_SHADER.
 * @param source NUL-terminated GLSL source text.
 * @return Shader handle, or 0 on compile failure (error is logged).
 */
static GLuint CompileShader(GLenum type, const char* source) {
    const GLuint handle = glCreateShader(type);
    glShaderSource(handle, 1, &source, nullptr);
    glCompileShader(handle);

    GLint compiled = 0;
    glGetShaderiv(handle, GL_COMPILE_STATUS, &compiled);
    if (compiled) {
        return handle;
    }

    char infoLog[512];
    glGetShaderInfoLog(handle, sizeof(infoLog), nullptr, infoLog);
    LOGE("Shader compile error: %s", infoLog);
    glDeleteShader(handle);
    return 0;
}
/** Construction does no work; Configure()/AddDestination() precede Start(). */
StreamingEngine::StreamingEngine() = default;

/** Joins the encoder thread (via Stop()) and frees every RTMP sink. */
StreamingEngine::~StreamingEngine() {
    Stop();
    for (RtmpSink* sink : sinks) {
        delete sink;
    }
    sinks.clear();
}
bool StreamingEngine::Configure(int w, int h, int vBitrate, int aBitrate,
int sr, int ch, int kfi) {
width = w;
height = h;
videoBitrate = vBitrate;
audioBitrate = aBitrate;
sampleRate = sr;
channels = ch;
keyframeInterval = kfi;
return true;
}
int StreamingEngine::AddDestination(const std::string& rtmpUrl) {
auto* sink = new RtmpSink();
sink->SetRtmpUrl(rtmpUrl);
sinks.push_back(sink);
return static_cast<int>(sinks.size() - 1);
}
/**
 * Creates, configures, and starts the H.264 (AVC) encoder in
 * surface-input mode. On success, encoderSurface holds the codec's input
 * ANativeWindow, released later in the encoder thread's teardown.
 *
 * NOTE: framerate is never set by Configure(); the header default (30)
 * is always used here.
 *
 * @return false on any codec failure; partially created state is rolled back.
 */
bool StreamingEngine::InitVideoEncoder() {
    videoEncoder = AMediaCodec_createEncoderByType("video/avc");
    if (!videoEncoder) {
        LOGE("Failed to create video encoder");
        return false;
    }
    AMediaFormat* format = AMediaFormat_new();
    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, "video/avc");
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, width);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HEIGHT, height);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, videoBitrate);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_FRAME_RATE, framerate);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, keyframeInterval);
    // Raw values mirror the Java MediaCodecInfo constants, which have no
    // NDK symbol names:
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT, 0x7F000789); // COLOR_FormatSurface
    AMediaFormat_setInt32(format, "profile", 8); // AVCProfileHigh
    AMediaFormat_setInt32(format, "level", 2048); // AVCLevel42
    AMediaFormat_setInt32(format, "bitrate-mode", 2); // CBR
    media_status_t status = AMediaCodec_configure(videoEncoder, format, nullptr, nullptr,
                                                 AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
    AMediaFormat_delete(format);
    if (status != AMEDIA_OK) {
        LOGE("Video encoder configure failed: %d", status);
        AMediaCodec_delete(videoEncoder);
        videoEncoder = nullptr;
        return false;
    }
    // Surface input: frames arrive via EGL swaps, not queued input buffers.
    status = AMediaCodec_createInputSurface(videoEncoder, &encoderSurface);
    if (status != AMEDIA_OK || !encoderSurface) {
        LOGE("Failed to create encoder input surface: %d", status);
        AMediaCodec_delete(videoEncoder);
        videoEncoder = nullptr;
        return false;
    }
    status = AMediaCodec_start(videoEncoder);
    if (status != AMEDIA_OK) {
        LOGE("Video encoder start failed: %d", status);
        ANativeWindow_release(encoderSurface);
        encoderSurface = nullptr;
        AMediaCodec_delete(videoEncoder);
        videoEncoder = nullptr;
        return false;
    }
    LOGI("Video encoder started: %dx%d @ %d bps", width, height, videoBitrate);
    return true;
}
/**
 * Creates, configures, and starts the AAC-LC encoder in buffer-input
 * mode (PCM is fed via ProcessAudioFrame()).
 *
 * @return false on any codec failure; partially created state is rolled back.
 */
bool StreamingEngine::InitAudioEncoder() {
    audioEncoder = AMediaCodec_createEncoderByType("audio/mp4a-latm");
    if (!audioEncoder) {
        LOGE("Failed to create audio encoder");
        return false;
    }
    AMediaFormat* format = AMediaFormat_new();
    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, "audio/mp4a-latm");
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_AAC_PROFILE, 2); // AAC-LC
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, audioBitrate);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, sampleRate);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, channels);
    media_status_t status = AMediaCodec_configure(audioEncoder, format, nullptr, nullptr,
                                                 AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
    AMediaFormat_delete(format);
    if (status != AMEDIA_OK) {
        LOGE("Audio encoder configure failed: %d", status);
        AMediaCodec_delete(audioEncoder);
        audioEncoder = nullptr;
        return false;
    }
    status = AMediaCodec_start(audioEncoder);
    if (status != AMEDIA_OK) {
        LOGE("Audio encoder start failed: %d", status);
        AMediaCodec_delete(audioEncoder);
        audioEncoder = nullptr;
        return false;
    }
    LOGI("Audio encoder started: %dHz %dch @ %d bps", sampleRate, channels, audioBitrate);
    return true;
}
/**
 * Creates the GL program and full-screen-quad geometry used to blit the
 * imported OES texture onto the encoder input surface.
 *
 * Must run with the EGL context current (encoder thread).
 *
 * @return false on shader compile or program link failure; partially
 *         created GL objects are released.
 */
bool StreamingEngine::InitBlitResources() {
    GLuint vs = CompileShader(GL_VERTEX_SHADER, BLIT_VERTEX_SHADER);
    GLuint fs = CompileShader(GL_FRAGMENT_SHADER, BLIT_FRAGMENT_SHADER);
    if (!vs || !fs) {
        // BUGFIX: previously the surviving shader leaked when only one of
        // the two stages failed to compile.
        if (vs) glDeleteShader(vs);
        if (fs) glDeleteShader(fs);
        return false;
    }
    blitProgram = glCreateProgram();
    glAttachShader(blitProgram, vs);
    glAttachShader(blitProgram, fs);
    glLinkProgram(blitProgram);
    // Once attached, shaders may be flagged for deletion; they are freed
    // when the program is deleted.
    glDeleteShader(vs);
    glDeleteShader(fs);
    GLint linkStatus;
    glGetProgramiv(blitProgram, GL_LINK_STATUS, &linkStatus);
    if (!linkStatus) {
        LOGE("Blit program link failed");
        glDeleteProgram(blitProgram);
        blitProgram = 0;
        return false;
    }
    // Full-screen quad, interleaved pos(x,y) + texcoord(u,v),
    // ordered for GL_TRIANGLE_STRIP.
    float quad[] = {
        -1.0f, -1.0f, 0.0f, 0.0f,
         1.0f, -1.0f, 1.0f, 0.0f,
        -1.0f,  1.0f, 0.0f, 1.0f,
         1.0f,  1.0f, 1.0f, 1.0f,
    };
    glGenVertexArrays(1, &blitVao);
    glGenBuffers(1, &blitVbo);
    glBindVertexArray(blitVao);
    glBindBuffer(GL_ARRAY_BUFFER, blitVbo);
    glBufferData(GL_ARRAY_BUFFER, sizeof(quad), quad, GL_STATIC_DRAW);
    // location 0: vec2 position
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), (void*)0);
    glEnableVertexAttribArray(0);
    // location 1: vec2 texcoord
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), (void*)(2 * sizeof(float)));
    glEnableVertexAttribArray(1);
    glBindVertexArray(0);
    return true;
}
/** Frees the blit VAO/VBO/program; safe to call when already released. */
void StreamingEngine::ReleaseBlitResources() {
    if (blitVao != 0) {
        glDeleteVertexArrays(1, &blitVao);
        blitVao = 0;
    }
    if (blitVbo != 0) {
        glDeleteBuffers(1, &blitVbo);
        blitVbo = 0;
    }
    if (blitProgram != 0) {
        glDeleteProgram(blitProgram);
        blitProgram = 0;
    }
}
/**
 * Starts the pipeline: validates configuration and destinations, resets
 * per-session timing/statistics, then spawns the encoder thread.
 *
 * Returns true immediately when already running.
 * The running flag is set before the thread launches so Submit*Frame()
 * calls made right after Start() are accepted.
 *
 * @return false when dimensions are unset or no destination was added.
 */
bool StreamingEngine::Start() {
    if (running.load()) return true;
    if (width <= 0 || height <= 0) {
        LOGE("Invalid dimensions: %dx%d", width, height);
        return false;
    }
    if (sinks.empty()) {
        LOGE("No destinations configured");
        return false;
    }
    running.store(true);
    // Reset per-session state before the encoder thread observes it.
    firstVideoFrame = true;
    startTimestampNs = 0;
    statsVideoBytes = 0;
    statsAudioBytes = 0;
    statsFrameCount = 0;
    statsLastUpdateNs = 0;
    encoderThread = std::thread(&StreamingEngine::EncoderThreadFunc, this);
    return true;
}
void StreamingEngine::EncoderThreadFunc() {
LOGI("Encoder thread started");
// Init EGL
if (!eglContext.Init()) {
LOGE("EGL init failed");
running.store(false);
if (errorCallback) errorCallback(1, "EGL initialization failed");
return;
}
// Init video encoder (creates input surface)
if (!InitVideoEncoder()) {
LOGE("Video encoder init failed");
eglContext.Release();
running.store(false);
if (errorCallback) errorCallback(2, "Video encoder initialization failed");
return;
}
// Create EGL window surface from encoder input surface
if (!eglContext.CreateWindowSurface(encoderSurface)) {
LOGE("EGL window surface creation failed");
AMediaCodec_stop(videoEncoder);
AMediaCodec_delete(videoEncoder);
videoEncoder = nullptr;
ANativeWindow_release(encoderSurface);
encoderSurface = nullptr;
eglContext.Release();
running.store(false);
if (errorCallback) errorCallback(3, "EGL window surface creation failed");
return;
}
if (!eglContext.MakeCurrent()) {
LOGE("EGL make current failed");
running.store(false);
if (errorCallback) errorCallback(4, "EGL make current failed");
return;
}
// Init blit resources
if (!InitBlitResources()) {
LOGE("Blit resources init failed");
running.store(false);
if (errorCallback) errorCallback(5, "Blit resources initialization failed");
return;
}
// Init audio encoder
if (!InitAudioEncoder()) {
LOGW("Audio encoder init failed, continuing without audio");
}
// Open RTMP sinks
for (auto* sink : sinks) {
if (!sink->Open(width, height, framerate, sampleRate, channels)) {
LOGE("Failed to open RTMP sink");
if (errorCallback) errorCallback(6, "RTMP connection failed");
}
}
LOGI("Streaming engine fully initialized");
// Main encoder loop
while (running.load()) {
// Process video frames
{
std::lock_guard<std::mutex> lock(videoMutex);
for (auto& frame : videoQueue) {
ProcessVideoFrame(frame);
}
videoQueue.clear();
}
// Process audio frames
{
std::lock_guard<std::mutex> lock(audioMutex);
for (auto& frame : audioQueue) {
ProcessAudioFrame(frame);
}
audioQueue.clear();
}
// Drain encoders
DrainVideoEncoder();
if (audioEncoder) {
DrainAudioEncoder();
}
// Don't spin-wait
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
// Cleanup
LOGI("Encoder thread shutting down");
ReleaseBlitResources();
for (auto* sink : sinks) {
sink->Close();
}
if (videoEncoder) {
AMediaCodec_stop(videoEncoder);
AMediaCodec_delete(videoEncoder);
videoEncoder = nullptr;
}
if (encoderSurface) {
ANativeWindow_release(encoderSurface);
encoderSurface = nullptr;
}
if (audioEncoder) {
AMediaCodec_stop(audioEncoder);
AMediaCodec_delete(audioEncoder);
audioEncoder = nullptr;
}
eglContext.Release();
LOGI("Encoder thread stopped");
}
/**
 * Encodes one queued video frame: waits on the producer's GPU fence,
 * imports the AHardwareBuffer as an external OES texture, blits it to the
 * encoder input surface, then deletes the transient texture.
 *
 * The first frame's timestamp becomes the session time base used to
 * rebase audio timestamps.
 *
 * NOTE(review): frame.buffer is used without AHardwareBuffer_acquire();
 * this relies on the producer keeping the buffer alive until the frame
 * has been processed — confirm against the JNI submission path.
 * NOTE(review): assumes EglContext::WaitFence consumes/closes fenceFd;
 * verify, otherwise the descriptor leaks here.
 */
void StreamingEngine::ProcessVideoFrame(const VideoFrame& frame) {
    if (!frame.buffer) return;
    // Latch the session time base off the first frame.
    if (firstVideoFrame) {
        startTimestampNs = frame.timestampNs;
        firstVideoFrame = false;
    }
    // Wait on GPU fence
    eglContext.WaitFence(frame.fenceFd);
    // Import HardwareBuffer as GL texture
    GLuint texture = eglContext.ImportHardwareBuffer(frame.buffer);
    if (texture == 0) {
        LOGW("Failed to import HardwareBuffer as texture");
        return;
    }
    // Blit to encoder surface
    BlitToEncoder(texture, frame.timestampNs);
    // Clean up texture
    glDeleteTextures(1, &texture);
}
/**
 * Draws srcTexture (external OES) as a full-screen quad onto the EGL
 * window surface backing the encoder input, stamps the presentation time,
 * and swaps — handing the frame to the video encoder.
 *
 * Must run on the encoder thread with the EGL context current.
 */
void StreamingEngine::BlitToEncoder(GLuint srcTexture, int64_t timestampNs) {
    glViewport(0, 0, width, height);
    glUseProgram(blitProgram);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_EXTERNAL_OES, srcTexture);
    // NOTE(review): uniform location is looked up every frame; could be
    // cached at init if profiling shows it matters.
    glUniform1i(glGetUniformLocation(blitProgram, "uTexture"), 0);
    glBindVertexArray(blitVao);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    glBindVertexArray(0);
    // Presentation time is set before the swap so the encoder picks it up
    // for this frame (EGL_ANDROID_presentation_time semantics).
    eglContext.SetPresentationTime(timestampNs);
    eglContext.SwapBuffers();
}
/**
 * Feeds one PCM frame to the AAC encoder.
 *
 * Timestamps are rebased against the first video frame so audio and
 * video share a time base, and converted from ns to µs for MediaCodec.
 * The frame is dropped (with a warning) when no input buffer is free.
 */
void StreamingEngine::ProcessAudioFrame(const AudioFrame& frame) {
    if (!audioEncoder || frame.pcmData.empty()) return;
    ssize_t inputIndex = AMediaCodec_dequeueInputBuffer(audioEncoder, 0);
    if (inputIndex < 0) {
        LOGW("No audio input buffer available");
        return;
    }
    size_t bufferSize;
    uint8_t* inputBuffer = AMediaCodec_getInputBuffer(audioEncoder, inputIndex, &bufferSize);
    if (!inputBuffer) {
        // BUGFIX: return the dequeued buffer to the codec instead of
        // leaking it (previously it was never queued back).
        AMediaCodec_queueInputBuffer(audioEncoder, inputIndex, 0, 0, 0, 0);
        return;
    }
    size_t copySize = std::min(frame.pcmData.size(), bufferSize);
    if (copySize < frame.pcmData.size()) {
        // Oversized submissions are silently lossy; make that visible.
        LOGW("Audio frame truncated: %zu -> %zu bytes", frame.pcmData.size(), copySize);
    }
    memcpy(inputBuffer, frame.pcmData.data(), copySize);
    int64_t relativeTs = frame.timestampNs - startTimestampNs;
    AMediaCodec_queueInputBuffer(audioEncoder, inputIndex, 0, copySize,
                                 relativeTs / 1000, 0);
}
/**
 * Pulls every ready output buffer from the video encoder and forwards it
 * to all sinks.
 *
 * Codec-config buffers (SPS/PPS extradata) are routed ONLY to
 * OnVideoFormatReady(); regular frames go to SendVideoPacket() and are
 * counted in the bitrate/fps statistics.
 *
 * BUGFIX: codec-config buffers have size > 0, so they were previously
 * ALSO sent as a regular video packet and counted in the stats.
 * DrainAudioEncoder() already excluded them; this now matches.
 */
void StreamingEngine::DrainVideoEncoder() {
    if (!videoEncoder) return;
    AMediaCodecBufferInfo info;
    ssize_t outputIndex;
    while ((outputIndex = AMediaCodec_dequeueOutputBuffer(videoEncoder, &info, 0)) >= 0) {
        const bool isConfig = (info.flags & AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG) != 0;
        if (isConfig) {
            // Sequence header (SPS/PPS) — forward to sinks as extradata only.
            size_t outSize;
            uint8_t* configData = AMediaCodec_getOutputBuffer(videoEncoder, outputIndex, &outSize);
            if (configData) {
                for (auto* sink : sinks) {
                    sink->OnVideoFormatReady(configData + info.offset, info.size);
                }
            }
        } else if (info.size > 0) {
            size_t outSize;
            uint8_t* outputData = AMediaCodec_getOutputBuffer(videoEncoder, outputIndex, &outSize);
            if (outputData) {
                bool isKeyframe = (info.flags & AMEDIACODEC_BUFFER_FLAG_KEY_FRAME) != 0;
                int64_t timestampMs = info.presentationTimeUs / 1000;
                for (auto* sink : sinks) {
                    sink->SendVideoPacket(outputData + info.offset, info.size,
                                          timestampMs, isKeyframe);
                }
                std::lock_guard<std::mutex> lock(statsMutex);
                statsVideoBytes += info.size;
                statsFrameCount++;
            }
        }
        AMediaCodec_releaseOutputBuffer(videoEncoder, outputIndex, false);
        UpdateStats();
    }
    if (outputIndex == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
        AMediaFormat* newFormat = AMediaCodec_getOutputFormat(videoEncoder);
        if (newFormat) {
            LOGI("Video encoder output format changed");
            AMediaFormat_delete(newFormat);
        }
    }
}
/**
 * Pulls every ready output buffer from the AAC encoder and forwards the
 * encoded payload to all sinks. Codec-config buffers are skipped: the
 * sinks derive the audio format from the parameters passed to Open().
 */
void StreamingEngine::DrainAudioEncoder() {
    if (!audioEncoder) return;
    AMediaCodecBufferInfo info;
    ssize_t outputIndex;
    while ((outputIndex = AMediaCodec_dequeueOutputBuffer(audioEncoder, &info, 0)) >= 0) {
        const bool isConfig = (info.flags & AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG) != 0;
        if (!isConfig && info.size > 0) {
            size_t outSize;
            uint8_t* payload = AMediaCodec_getOutputBuffer(audioEncoder, outputIndex, &outSize);
            if (payload != nullptr) {
                const int64_t timestampMs = info.presentationTimeUs / 1000;
                for (auto* sink : sinks) {
                    sink->SendAudioPacket(payload + info.offset, info.size, timestampMs);
                }
                std::lock_guard<std::mutex> lock(statsMutex);
                statsAudioBytes += info.size;
            }
        }
        AMediaCodec_releaseOutputBuffer(audioEncoder, outputIndex, false);
    }
}
void StreamingEngine::UpdateStats() {
auto now = std::chrono::steady_clock::now().time_since_epoch();
int64_t nowNs = std::chrono::duration_cast<std::chrono::nanoseconds>(now).count();
std::lock_guard<std::mutex> lock(statsMutex);
if (statsLastUpdateNs == 0) {
statsLastUpdateNs = nowNs;
return;
}
int64_t elapsedNs = nowNs - statsLastUpdateNs;
if (elapsedNs >= 1000000000LL) { // Every second
double elapsedSec = elapsedNs / 1000000000.0;
currentStats.videoBitrate = static_cast<int64_t>(statsVideoBytes * 8 / elapsedSec);
currentStats.audioBitrate = static_cast<int64_t>(statsAudioBytes * 8 / elapsedSec);
currentStats.fps = static_cast<int>(statsFrameCount / elapsedSec);
statsVideoBytes = 0;
statsAudioBytes = 0;
statsFrameCount = 0;
statsLastUpdateNs = nowNs;
if (statsCallback) {
statsCallback(currentStats);
}
}
}
/**
 * Queues one video frame for the encoder thread. Non-blocking.
 *
 * When the engine is not running the frame is dropped and the fence fd
 * is closed so it does not leak.
 *
 * NOTE(review): the AHardwareBuffer is stored without
 * AHardwareBuffer_acquire(); the producer must keep it valid until the
 * encoder thread has processed the frame — confirm against the caller.
 * NOTE(review): the queue is unbounded; a stalled encoder thread grows
 * it without limit.
 */
void StreamingEngine::SubmitVideoFrame(AHardwareBuffer* buffer, int64_t timestampNs, int fenceFd) {
    if (!running.load()) {
        if (fenceFd >= 0) close(fenceFd);
        return;
    }
    VideoFrame frame;
    frame.buffer = buffer;
    frame.timestampNs = timestampNs;
    frame.fenceFd = fenceFd;
    std::lock_guard<std::mutex> lock(videoMutex);
    videoQueue.push_back(frame);
}
/**
 * Queues raw PCM for the encoder thread (non-blocking; bytes are copied).
 * Dropped silently while the engine is not running.
 */
void StreamingEngine::SubmitAudioFrame(const uint8_t* pcmData, size_t pcmSize, int64_t timestampNs) {
    if (!running.load()) return;
    // BUGFIX: guard null/empty input — vector::assign over a null pointer
    // range is undefined behavior. (Empty frames were dropped later in
    // ProcessAudioFrame anyway, so skipping them here preserves behavior.)
    if (pcmData == nullptr || pcmSize == 0) return;
    AudioFrame frame;
    frame.pcmData.assign(pcmData, pcmData + pcmSize);
    frame.timestampNs = timestampNs;
    std::lock_guard<std::mutex> lock(audioMutex);
    audioQueue.push_back(std::move(frame));
}
/**
 * Signals the encoder thread to finish and blocks until it has joined,
 * which completes the full teardown (EGL, codecs, sink Close()).
 * No-op when the engine is not running.
 */
void StreamingEngine::Stop() {
    if (!running.load()) return;
    LOGI("Stopping streaming engine");
    running.store(false);  // encoder loop exits on its next iteration
    if (encoderThread.joinable()) {
        encoderThread.join();
    }
    LOGI("Streaming engine stopped");
}
/** Registers the stats listener. Plain (unsynchronized) std::function
 *  assignment — set before Start(). */
void StreamingEngine::SetStatsCallback(StatsCallback callback) {
    statsCallback = std::move(callback);
}
/** Registers the error listener. Plain (unsynchronized) std::function
 *  assignment — set before Start(). */
void StreamingEngine::SetErrorCallback(ErrorCallback callback) {
    errorCallback = std::move(callback);
}
/** Registers the buffer-released listener.
 *  NOTE(review): the stored callback is never invoked anywhere in this
 *  file — confirm whether it is dead or wired up elsewhere. */
void StreamingEngine::SetBufferReleasedCallback(BufferReleasedCallback callback) {
    bufferReleasedCallback = std::move(callback);
}

View File

@@ -0,0 +1,150 @@
#pragma once
#include "egl_context.h"
#include "rtmp_sink.h"
#include <media/NdkMediaCodec.h>
#include <media/NdkMediaFormat.h>
#include <android/hardware_buffer.h>
#include <android/native_window.h>
#include <atomic>
#include <cstdint>
#include <functional>
#include <mutex>
#include <string>
#include <thread>
#include <vector>
// One video frame handed from the producer to the encoder thread.
struct VideoFrame {
    // NOTE(review): not ref-counted here (no AHardwareBuffer_acquire);
    // the producer must keep the buffer alive until processed.
    AHardwareBuffer* buffer;
    int64_t timestampNs;  // capture timestamp, nanoseconds
    int fenceFd; // -1 if no fence
};
// One audio frame; owns a copy of the submitted PCM bytes.
struct AudioFrame {
    std::vector<uint8_t> pcmData;
    int64_t timestampNs;  // capture timestamp, nanoseconds
};
// Rolling per-second statistics published through the stats callback.
struct StreamingStats {
    int64_t videoBitrate = 0;  // bits per second over the last window
    int64_t audioBitrate = 0;  // bits per second over the last window
    int fps = 0;
    // NOTE(review): not updated anywhere in streaming_engine.cpp — confirm
    // whether drop accounting is implemented elsewhere or still pending.
    int droppedFrames = 0;
};
/**
 * Streaming engine: imports HardwareBuffers via EGL, encodes with AMediaCodec,
 * and streams via RTMP to one or more destinations.
 *
 * All encoding happens in native code (zero-copy pipeline).
 *
 * Threading model: Submit*Frame() may be called from producer threads;
 * everything else (EGL, codecs, sinks) lives on the internal encoder
 * thread. Configure callbacks and destinations before Start().
 */
class StreamingEngine {
public:
    using StatsCallback = std::function<void(const StreamingStats&)>;
    using ErrorCallback = std::function<void(int code, const std::string& message)>;
    using BufferReleasedCallback = std::function<void(int bufferIndex)>;
    StreamingEngine();
    ~StreamingEngine();
    /** Configure the engine. Must be called before Start(). */
    bool Configure(int width, int height, int videoBitrate, int audioBitrate,
                   int sampleRate, int channels, int keyframeInterval);
    /** Add an RTMP destination. Returns destination index. */
    int AddDestination(const std::string& rtmpUrl);
    /** Start encoding and streaming. */
    bool Start();
    /** Submit a video frame from HardwareBuffer. Non-blocking. */
    void SubmitVideoFrame(AHardwareBuffer* buffer, int64_t timestampNs, int fenceFd);
    /** Submit audio PCM data. Non-blocking. */
    void SubmitAudioFrame(const uint8_t* pcmData, size_t pcmSize, int64_t timestampNs);
    /** Stop encoding and streaming. Blocks until clean shutdown. */
    void Stop();
    /** Set callbacks. Assignments are unsynchronized — set before Start(). */
    void SetStatsCallback(StatsCallback callback);
    void SetErrorCallback(ErrorCallback callback);
    void SetBufferReleasedCallback(BufferReleasedCallback callback);
    bool IsRunning() const { return running.load(); }
private:
    // Encoder thread
    void EncoderThreadFunc();
    void ProcessVideoFrame(const VideoFrame& frame);
    void ProcessAudioFrame(const AudioFrame& frame);
    void DrainVideoEncoder();
    void DrainAudioEncoder();
    void UpdateStats();
    // Blit HardwareBuffer texture to encoder surface
    void BlitToEncoder(GLuint srcTexture, int64_t timestampNs);
    // Config (set by Configure(); framerate keeps its default — Configure()
    // has no framerate parameter)
    int width = 0;
    int height = 0;
    int videoBitrate = 6000000;   // bps
    int audioBitrate = 128000;    // bps
    int sampleRate = 48000;       // Hz
    int channels = 2;
    int keyframeInterval = 2;     // seconds
    int framerate = 30;
    // EGL
    EglContext eglContext;
    // Blit resources (valid only while the encoder thread runs)
    GLuint blitProgram = 0;
    GLuint blitVao = 0;
    GLuint blitVbo = 0;
    // Video encoder (surface-input mode)
    AMediaCodec* videoEncoder = nullptr;
    ANativeWindow* encoderSurface = nullptr;
    // Audio encoder (buffer-input mode; may stay null — audio is optional)
    AMediaCodec* audioEncoder = nullptr;
    // RTMP sinks (one per destination; owned, freed in the destructor)
    std::vector<RtmpSink*> sinks;
    // Threading
    std::thread encoderThread;
    std::atomic<bool> running{false};
    // Frame queues (protected by mutex; unbounded)
    std::mutex videoMutex;
    std::vector<VideoFrame> videoQueue;
    std::mutex audioMutex;
    std::vector<AudioFrame> audioQueue;
    // Stats (protected by statsMutex)
    std::mutex statsMutex;
    StreamingStats currentStats;
    int64_t statsVideoBytes = 0;
    int64_t statsAudioBytes = 0;
    int statsFrameCount = 0;
    int64_t statsLastUpdateNs = 0;
    // Start timestamp for relative timing (first video frame's timestamp)
    int64_t startTimestampNs = 0;
    bool firstVideoFrame = true;
    // Callbacks
    StatsCallback statsCallback;
    ErrorCallback errorCallback;
    BufferReleasedCallback bufferReleasedCallback;
    bool InitVideoEncoder();
    bool InitAudioEncoder();
    bool InitBlitResources();
    void ReleaseBlitResources();
};

View File

@@ -0,0 +1,164 @@
#ifndef __AMF_H__
#define __AMF_H__
/*
* Copyright (C) 2005-2008 Team XBMC
* http://www.xbmc.org
* Copyright (C) 2008-2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#include <stdint.h>
/* NOTE(review): vendored librtmp header (LGPL 2.1) from rtmpdump — keep
 * in sync with upstream; do not patch locally. */
#ifndef TRUE
#define TRUE 1
#define FALSE 0
#endif
#ifdef __cplusplus
extern "C"
{
#endif
/* AMF0 wire-format type markers. */
typedef enum
{ AMF_NUMBER = 0, AMF_BOOLEAN, AMF_STRING, AMF_OBJECT,
AMF_MOVIECLIP, /* reserved, not used */
AMF_NULL, AMF_UNDEFINED, AMF_REFERENCE, AMF_ECMA_ARRAY, AMF_OBJECT_END,
AMF_STRICT_ARRAY, AMF_DATE, AMF_LONG_STRING, AMF_UNSUPPORTED,
AMF_RECORDSET, /* reserved, not used */
AMF_XML_DOC, AMF_TYPED_OBJECT,
AMF_AVMPLUS, /* switch to AMF3 */
AMF_INVALID = 0xff
} AMFDataType;
/* AMF3 wire-format type markers. */
typedef enum
{ AMF3_UNDEFINED = 0, AMF3_NULL, AMF3_FALSE, AMF3_TRUE,
AMF3_INTEGER, AMF3_DOUBLE, AMF3_STRING, AMF3_XML_DOC, AMF3_DATE,
AMF3_ARRAY, AMF3_OBJECT, AMF3_XML, AMF3_BYTE_ARRAY
} AMF3DataType;
/* Length-counted string view; av_val is NOT guaranteed NUL-terminated. */
typedef struct AVal
{
char *av_val;
int av_len;
} AVal;
/* Initialize an AVal from a string literal (length excludes the NUL). */
#define AVC(str) {str,sizeof(str)-1}
#define AVMATCH(a1,a2) ((a1)->av_len == (a2)->av_len && !memcmp((a1)->av_val,(a2)->av_val,(a1)->av_len))
struct AMFObjectProperty;
/* A decoded AMF object: an array of named/typed properties. */
typedef struct AMFObject
{
int o_num;
struct AMFObjectProperty *o_props;
} AMFObject;
typedef struct AMFObjectProperty
{
AVal p_name;
AMFDataType p_type;
union
{
double p_number;
AVal p_aval;
AMFObject p_object;
} p_vu;
int16_t p_UTCoffset;
} AMFObjectProperty;
/* Encoders: write into [output, outend) and return the new write position
 * (upstream convention). */
char *AMF_EncodeString(char *output, char *outend, const AVal * str);
char *AMF_EncodeNumber(char *output, char *outend, double dVal);
char *AMF_EncodeInt16(char *output, char *outend, short nVal);
char *AMF_EncodeInt24(char *output, char *outend, int nVal);
char *AMF_EncodeInt32(char *output, char *outend, int nVal);
char *AMF_EncodeBoolean(char *output, char *outend, int bVal);
/* Shortcuts for AMFProp_Encode */
char *AMF_EncodeNamedString(char *output, char *outend, const AVal * name, const AVal * value);
char *AMF_EncodeNamedNumber(char *output, char *outend, const AVal * name, double dVal);
char *AMF_EncodeNamedBoolean(char *output, char *outend, const AVal * name, int bVal);
/* Big-endian primitive decoders. */
unsigned short AMF_DecodeInt16(const char *data);
unsigned int AMF_DecodeInt24(const char *data);
unsigned int AMF_DecodeInt32(const char *data);
void AMF_DecodeString(const char *data, AVal * str);
void AMF_DecodeLongString(const char *data, AVal * str);
int AMF_DecodeBoolean(const char *data);
double AMF_DecodeNumber(const char *data);
char *AMF_Encode(AMFObject * obj, char *pBuffer, char *pBufEnd);
char *AMF_EncodeEcmaArray(AMFObject *obj, char *pBuffer, char *pBufEnd);
char *AMF_EncodeArray(AMFObject *obj, char *pBuffer, char *pBufEnd);
int AMF_Decode(AMFObject * obj, const char *pBuffer, int nSize,
int bDecodeName);
int AMF_DecodeArray(AMFObject * obj, const char *pBuffer, int nSize,
int nArrayLen, int bDecodeName);
int AMF3_Decode(AMFObject * obj, const char *pBuffer, int nSize,
int bDecodeName);
void AMF_Dump(AMFObject * obj);
void AMF_Reset(AMFObject * obj);
void AMF_AddProp(AMFObject * obj, const AMFObjectProperty * prop);
int AMF_CountProp(AMFObject * obj);
AMFObjectProperty *AMF_GetProp(AMFObject * obj, const AVal * name,
int nIndex);
/* Property accessors/mutators. */
AMFDataType AMFProp_GetType(AMFObjectProperty * prop);
void AMFProp_SetNumber(AMFObjectProperty * prop, double dval);
void AMFProp_SetBoolean(AMFObjectProperty * prop, int bflag);
void AMFProp_SetString(AMFObjectProperty * prop, AVal * str);
void AMFProp_SetObject(AMFObjectProperty * prop, AMFObject * obj);
void AMFProp_GetName(AMFObjectProperty * prop, AVal * name);
void AMFProp_SetName(AMFObjectProperty * prop, AVal * name);
double AMFProp_GetNumber(AMFObjectProperty * prop);
int AMFProp_GetBoolean(AMFObjectProperty * prop);
void AMFProp_GetString(AMFObjectProperty * prop, AVal * str);
void AMFProp_GetObject(AMFObjectProperty * prop, AMFObject * obj);
int AMFProp_IsValid(AMFObjectProperty * prop);
char *AMFProp_Encode(AMFObjectProperty * prop, char *pBuffer, char *pBufEnd);
int AMF3Prop_Decode(AMFObjectProperty * prop, const char *pBuffer,
int nSize, int bDecodeName);
int AMFProp_Decode(AMFObjectProperty * prop, const char *pBuffer,
int nSize, int bDecodeName);
void AMFProp_Dump(AMFObjectProperty * prop);
void AMFProp_Reset(AMFObjectProperty * prop);
/* AMF3 class-definition (traits) support. */
typedef struct AMF3ClassDef
{
AVal cd_name;
char cd_externalizable;
char cd_dynamic;
int cd_num;
AVal *cd_props;
} AMF3ClassDef;
void AMF3CD_AddProp(AMF3ClassDef * cd, AVal * prop);
AVal *AMF3CD_GetProp(AMF3ClassDef * cd, int idx);
#ifdef __cplusplus
}
#endif
#endif /* __AMF_H__ */

View File

@@ -0,0 +1,91 @@
/*
* Copyright (C) 2005-2008 Team XBMC
* http://www.xbmc.org
* Copyright (C) 2008-2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#ifndef __BYTES_H__
#define __BYTES_H__
/* NOTE(review): vendored librtmp header (LGPL 2.1) from rtmpdump — keep
 * in sync with upstream; do not patch locally. Defines __BYTE_ORDER /
 * __FLOAT_WORD_ORDER for platforms whose headers lack them. */
#include <stdint.h>
#ifdef _WIN32
/* Windows is little endian only */
#define __LITTLE_ENDIAN 1234
#define __BIG_ENDIAN 4321
#define __BYTE_ORDER __LITTLE_ENDIAN
#define __FLOAT_WORD_ORDER __BYTE_ORDER
typedef unsigned char uint8_t;
#else /* !_WIN32 */
#include <sys/param.h>
/* Map BSD-style (unprefixed) macros onto the glibc-style ones. */
#if defined(BYTE_ORDER) && !defined(__BYTE_ORDER)
#define __BYTE_ORDER BYTE_ORDER
#endif
#if defined(BIG_ENDIAN) && !defined(__BIG_ENDIAN)
#define __BIG_ENDIAN BIG_ENDIAN
#endif
#if defined(LITTLE_ENDIAN) && !defined(__LITTLE_ENDIAN)
#define __LITTLE_ENDIAN LITTLE_ENDIAN
#endif
#endif /* !_WIN32 */
/* define default endianness */
#ifndef __LITTLE_ENDIAN
#define __LITTLE_ENDIAN 1234
#endif
#ifndef __BIG_ENDIAN
#define __BIG_ENDIAN 4321
#endif
#ifndef __BYTE_ORDER
#warning "Byte order not defined on your system, assuming little endian!"
#define __BYTE_ORDER __LITTLE_ENDIAN
#endif
/* ok, we assume to have the same float word order and byte order if float word order is not defined */
#ifndef __FLOAT_WORD_ORDER
#warning "Float word order not defined, assuming the same as byte order!"
#define __FLOAT_WORD_ORDER __BYTE_ORDER
#endif
/* Sanity checks: fail the build on exotic/unknown orderings. */
#if !defined(__BYTE_ORDER) || !defined(__FLOAT_WORD_ORDER)
#error "Undefined byte or float word order!"
#endif
#if __FLOAT_WORD_ORDER != __BIG_ENDIAN && __FLOAT_WORD_ORDER != __LITTLE_ENDIAN
#error "Unknown/unsupported float word order!"
#endif
#if __BYTE_ORDER != __BIG_ENDIAN && __BYTE_ORDER != __LITTLE_ENDIAN
#error "Unknown/unsupported byte order!"
#endif
#endif

View File

@@ -0,0 +1,402 @@
/* RTMPDump - Diffie-Hellmann Key Exchange
* Copyright (C) 2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <limits.h>
#ifdef USE_POLARSSL
#include <polarssl/dhm.h>
typedef mpi * MP_t;
#define MP_new(m) m = malloc(sizeof(mpi)); mpi_init(m)
#define MP_set_w(mpi, w) mpi_lset(mpi, w)
#define MP_cmp(u, v) mpi_cmp_mpi(u, v)
#define MP_set(u, v) mpi_copy(u, v)
#define MP_sub_w(mpi, w) mpi_sub_int(mpi, mpi, w)
#define MP_cmp_1(mpi) mpi_cmp_int(mpi, 1)
#define MP_modexp(r, y, q, p) mpi_exp_mod(r, y, q, p, NULL)
#define MP_free(mpi) mpi_free(mpi); free(mpi)
#define MP_gethex(u, hex, res) MP_new(u); res = mpi_read_string(u, 16, hex) == 0
#define MP_bytes(u) mpi_size(u)
#define MP_setbin(u,buf,len) mpi_write_binary(u,buf,len)
#define MP_getbin(u,buf,len) MP_new(u); mpi_read_binary(u,buf,len)
#define MP_setpg(dh, p, g) dh->p = p; dh->g = g
#define MP_setlength(dh, l) dh->length = l
#define MP_getp(dh) dh->p
#define MP_getpubkey(dh) dh->pub_key
typedef struct MDH {
MP_t p;
MP_t g;
MP_t pub_key;
MP_t priv_key;
long length;
dhm_context ctx;
} MDH;
#define MDH_new() calloc(1,sizeof(MDH))
#define MDH_free(vp) {MDH *_dh = vp; dhm_free(&_dh->ctx); MP_free(_dh->p); MP_free(_dh->g); MP_free(_dh->pub_key); MP_free(_dh->priv_key); free(_dh);}
static int MDH_generate_key(MDH *dh)
{
unsigned char out[2];
MP_set(&dh->ctx.P, dh->p);
MP_set(&dh->ctx.G, dh->g);
dh->ctx.len = 128;
dhm_make_public(&dh->ctx, 1024, out, 1, havege_random, &RTMP_TLS_ctx->hs);
MP_new(dh->pub_key);
MP_new(dh->priv_key);
MP_set(dh->pub_key, &dh->ctx.GX);
MP_set(dh->priv_key, &dh->ctx.X);
return 1;
}
static int MDH_compute_key(uint8_t *secret, size_t len, MP_t pub, MDH *dh)
{
MP_set(&dh->ctx.GY, pub);
dhm_calc_secret(&dh->ctx, secret, &len);
return 0;
}
#elif defined(USE_GNUTLS)
#include <gmp.h>
#include <nettle/bignum.h>
#include <gnutls/crypto.h>
typedef mpz_ptr MP_t;
#define MP_new(m) m = malloc(sizeof(*m)); mpz_init2(m, 1)
#define MP_set_w(mpi, w) mpz_set_ui(mpi, w)
#define MP_cmp(u, v) mpz_cmp(u, v)
#define MP_set(u, v) mpz_set(u, v)
#define MP_sub_w(mpi, w) mpz_sub_ui(mpi, mpi, w)
#define MP_cmp_1(mpi) mpz_cmp_ui(mpi, 1)
#define MP_modexp(r, y, q, p) mpz_powm(r, y, q, p)
#define MP_free(mpi) mpz_clear(mpi); free(mpi)
#define MP_gethex(u, hex, res) u = malloc(sizeof(*u)); mpz_init2(u, 1); res = (mpz_set_str(u, hex, 16) == 0)
#define MP_bytes(u) (mpz_sizeinbase(u, 2) + 7) / 8
#define MP_setbin(u,buf,len) nettle_mpz_get_str_256(len,buf,u)
#define MP_getbin(u,buf,len) u = malloc(sizeof(*u)); mpz_init2(u, 1); nettle_mpz_set_str_256_u(u,len,buf)
#define MP_setpg(dh, p, g) dh->p = p; dh->g = g
#define MP_setlength(dh, l) dh->length = l
#define MP_getp(dh) dh->p
#define MP_getpubkey(dh) dh->pub_key
typedef struct MDH {
MP_t p;
MP_t g;
MP_t pub_key;
MP_t priv_key;
long length;
} MDH;
#define MDH_new() calloc(1,sizeof(MDH))
#define MDH_free(dh) do {MP_free(((MDH*)(dh))->p); MP_free(((MDH*)(dh))->g); MP_free(((MDH*)(dh))->pub_key); MP_free(((MDH*)(dh))->priv_key); free(dh);} while(0)
static int MDH_generate_key(MDH *dh)
{
int num_bytes;
uint32_t seed;
gmp_randstate_t rs;
num_bytes = (mpz_sizeinbase(dh->p, 2) + 7) / 8 - 1;
if (num_bytes <= 0 || num_bytes > 18000)
return 0;
dh->priv_key = calloc(1, sizeof(*dh->priv_key));
if (!dh->priv_key)
return 0;
mpz_init2(dh->priv_key, 1);
gnutls_rnd(GNUTLS_RND_RANDOM, &seed, sizeof(seed));
gmp_randinit_mt(rs);
gmp_randseed_ui(rs, seed);
mpz_urandomb(dh->priv_key, rs, num_bytes);
gmp_randclear(rs);
dh->pub_key = calloc(1, sizeof(*dh->pub_key));
if (!dh->pub_key)
return 0;
mpz_init2(dh->pub_key, 1);
if (!dh->pub_key) {
mpz_clear(dh->priv_key);
free(dh->priv_key);
return 0;
}
mpz_powm(dh->pub_key, dh->g, dh->priv_key, dh->p);
return 1;
}
static int MDH_compute_key(uint8_t *secret, size_t len, MP_t pub, MDH *dh)
{
mpz_ptr k;
int num_bytes;
num_bytes = (mpz_sizeinbase(dh->p, 2) + 7) / 8;
if (num_bytes <= 0 || num_bytes > 18000)
return -1;
k = calloc(1, sizeof(*k));
if (!k)
return -1;
mpz_init2(k, 1);
mpz_powm(k, pub, dh->priv_key, dh->p);
nettle_mpz_get_str_256(len, secret, k);
mpz_clear(k);
free(k);
/* return the length of the shared secret key like DH_compute_key */
return len;
}
#else /* USE_OPENSSL */
#include <openssl/bn.h>
#include <openssl/dh.h>
/* Map the backend-neutral MP_ / MDH_ names onto OpenSSL's BIGNUM and DH
 * APIs, so the DH code above compiles unchanged against any backend. */
typedef BIGNUM * MP_t;
#define MP_new(m) m = BN_new()
#define MP_set_w(mpi, w) BN_set_word(mpi, w)
#define MP_cmp(u, v) BN_cmp(u, v)
#define MP_set(u, v) BN_copy(u, v)
#define MP_sub_w(mpi, w) BN_sub_word(mpi, w)
#define MP_cmp_1(mpi) BN_cmp(mpi, BN_value_one())
/* modular exponentiation needs a scratch BN_CTX; create/destroy per call */
#define MP_modexp(r, y, q, p) do {BN_CTX *ctx = BN_CTX_new(); BN_mod_exp(r, y, q, p, ctx); BN_CTX_free(ctx);} while(0)
#define MP_free(mpi) BN_free(mpi)
#define MP_gethex(u, hex, res) res = BN_hex2bn(&u, hex)
#define MP_bytes(u) BN_num_bytes(u)
#define MP_setbin(u,buf,len) BN_bn2bin(u,buf)
#define MP_getbin(u,buf,len) u = BN_bin2bn(buf,len,0)
#define MDH DH
#define MDH_new() DH_new()
#define MDH_free(dh) DH_free(dh)
#define MDH_generate_key(dh) DH_generate_key(dh)
#define MDH_compute_key(secret, seclen, pub, dh) DH_compute_key(secret, pub, dh)
/* OpenSSL >= 1.1.0 made DH opaque; use the accessor functions there and
 * fall back to direct struct access for older releases. */
#if OPENSSL_VERSION_NUMBER >= 0x10100000
#define MP_setpg(dh, p, g) DH_set0_pqg(dh, p, NULL, g)
#define MP_setlength(dh, l) DH_set_length(dh, l)
#define MP_getp(dh) DH_get0_p(dh)
#define MP_getpubkey(dh) DH_get0_pub_key(dh)
#else
#define MP_setpg(dh, p, g) dh->p = p; dh->g = g
#define MP_setlength(dh, l) dh->length = l
#define MP_getp(dh) dh->p
#define MP_getpubkey(dh) dh->pub_key
#endif
#endif
#include "log.h"
#include "dhgroups.h"
/* RFC 2631, Section 2.1.5, http://www.ietf.org/rfc/rfc2631.txt */
/* Validate a DH public key y against the prime modulus p per RFC 2631
 * section 2.1.5.  q, if non-NULL, is the largest prime factor of the
 * group order and enables the y^q mod p == 1 check (warning only).
 * Returns TRUE if the key passes the range checks, FALSE otherwise.
 */
static int
isValidPublicKey(MP_t y, MP_t p, MP_t q)
{
int ret = TRUE;
MP_t bn;
assert(y);
MP_new(bn);
assert(bn);
/* y must lie in [2,p-1] */
/* NOTE(review): bn is set to 1, so this actually accepts y == 1 even
 * though the message says "at least 2".  Matches upstream librtmp;
 * confirm before tightening. */
MP_set_w(bn, 1);
if (MP_cmp(y, bn) < 0)
{
RTMP_Log(RTMP_LOGERROR, "DH public key must be at least 2");
ret = FALSE;
goto failed;
}
/* bn = p-2 */
/* NOTE(review): the code computes p-1 (single MP_sub_w), so the upper
 * bound enforced is y <= p-1, not p-2 as the comment/message claim. */
MP_set(bn, p);
MP_sub_w(bn, 1);
if (MP_cmp(y, bn) > 0)
{
RTMP_Log(RTMP_LOGERROR, "DH public key must be at most p-2");
ret = FALSE;
goto failed;
}
/* Verify with Sophie-Germain prime
 *
 * This is a nice test to make sure the public key position is calculated
 * correctly. This test will fail in about 50% of the cases if applied to
 * random data.
 */
if (q)
{
/* y must fulfill y^q mod p = 1 */
MP_modexp(bn, y, q, p);
/* deliberately a warning, not a failure: random-looking keys from
 * real peers can trip this check */
if (MP_cmp_1(bn) != 0)
{
RTMP_Log(RTMP_LOGWARNING, "DH public key does not fulfill y^q mod p = 1");
}
}
failed:
MP_free(bn);
return ret;
}
static MDH *
DHInit(int nKeyBits)
{
size_t res;
MDH *dh = MDH_new();
MP_t g, p;
if (!dh)
goto failed;
MP_new(g);
if (!g)
goto failed;
MP_gethex(p, P1024, res); /* prime P1024, see dhgroups.h */
if (!res)
{
goto failed;
}
MP_set_w(g, 2); /* base 2 */
MP_setpg(dh, p, g);
MP_setlength(dh, nKeyBits);
return dh;
failed:
if (dh)
MDH_free(dh);
return 0;
}
/* Generate a DH key pair for dh, retrying until the resulting public
 * key passes isValidPublicKey() against the group-order factor Q1024.
 * Returns nonzero on success, 0 on failure.
 *
 * Fixes over the previous version: q1 was uninitialised and its parse
 * result was only checked with assert(), so under NDEBUG a failed
 * MP_gethex() led to using (and MP_free-ing) a garbage pointer.
 */
static int
DHGenerateKey(MDH *dh)
{
MP_t q1 = NULL;
size_t res;
if (!dh)
return 0;
MP_gethex(q1, Q1024, res);
/* runtime check instead of assert(): fail cleanly if the constant
 * cannot be parsed (e.g. allocation failure) */
if (!res)
{
if (q1)
MP_free(q1);
return 0;
}
do
{
if (MDH_generate_key(dh))
{
MP_t key = (MP_t)MP_getpubkey(dh);
MP_t p = (MP_t)MP_getp(dh);
/* loop until the generated public key validates */
res = isValidPublicKey(key, p, q1);
}
else
{
/* pre-1.1 OpenSSL leaves half-built keys behind on failure;
 * clear them so a caller retry starts fresh */
#if !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000
MP_free(dh->pub_key);
MP_free(dh->priv_key);
dh->pub_key = dh->priv_key = 0;
#endif
res = 0;
break;
}
} while (!res);
MP_free(q1);
return res;
}
/* fill pubkey with the public key in BIG ENDIAN order
* 00 00 00 00 00 x1 x2 x3 .....
*/
/* Serialise dh's public key into pubkey as a big-endian value,
 * left-padded with zero bytes to exactly nPubkeyLen bytes
 * (00 00 ... x1 x2 x3 ...).  Returns 1 on success, 0 if dh has no
 * public key yet or the key does not fit in the buffer.
 */
static int
DHGetPublicKey(MDH *dh, uint8_t *pubkey, size_t nPubkeyLen)
{
MP_t key;
int keyBytes;
if (!dh)
return 0;
key = (MP_t)MP_getpubkey(dh);
if (!key)
return 0;
keyBytes = MP_bytes(key);
if (keyBytes <= 0 || keyBytes > (int) nPubkeyLen)
return 0;
/* zero-fill first so the key lands right-aligned over zero padding */
memset(pubkey, 0, nPubkeyLen);
MP_setbin(key, pubkey + (nPubkeyLen - keyBytes), keyBytes);
return 1;
}
#if 0 /* unused */
/* Serialise dh's private key into privkey, big-endian, left-padded
 * with zeros to nPrivkeyLen bytes.  Mirror of DHGetPublicKey; kept
 * compiled out because nothing in librtmp needs to export the private
 * key.  NOTE: accesses dh->priv_key directly, so as written it would
 * not build against opaque-DH OpenSSL (>= 1.1.0). */
static int
DHGetPrivateKey(MDH *dh, uint8_t *privkey, size_t nPrivkeyLen)
{
if (!dh || !dh->priv_key)
return 0;
int len = MP_bytes(dh->priv_key);
if (len <= 0 || len > (int) nPrivkeyLen)
return 0;
memset(privkey, 0, nPrivkeyLen);
MP_setbin(dh->priv_key, privkey + (nPrivkeyLen - len), len);
return 1;
}
#endif
/* computes the shared secret key from the private MDH value and the
* other party's public key (pubkey)
*/
/* Compute the DH shared secret from our private key and the peer's
 * public key (pubkey: nPubkeyLen big-endian bytes).  secret must point
 * to at least nPubkeyLen writable bytes.  The peer's key is range-
 * checked with isValidPublicKey() before exponentiation.
 * Returns the secret length as reported by MDH_compute_key, or -1 on
 * error (bad arguments, unparseable key, or validation failure).
 */
static int
DHComputeSharedSecretKey(MDH *dh, uint8_t *pubkey, size_t nPubkeyLen,
uint8_t *secret)
{
MP_t q1 = NULL, pubkeyBn = NULL;
size_t len;
int res;
if (!dh || !secret || nPubkeyLen >= INT_MAX)
return -1;
/* parse the peer's raw big-endian key into a bignum */
MP_getbin(pubkeyBn, pubkey, nPubkeyLen);
if (!pubkeyBn)
return -1;
/* q1 = largest prime factor of the group order (Q1024, dhgroups.h) */
MP_gethex(q1, Q1024, len);
/* NOTE(review): under NDEBUG this assert vanishes and a failed parse
 * falls through with len unchecked -- relies on MP_free tolerating the
 * resulting q1; confirm against the active backend's macros. */
assert(len);
if (isValidPublicKey(pubkeyBn, (MP_t)MP_getp(dh), q1))
res = MDH_compute_key(secret, nPubkeyLen, pubkeyBn, dh);
else
res = -1;
MP_free(q1);
MP_free(pubkeyBn);
return res;
}

View File

@@ -0,0 +1,199 @@
/* librtmp - Diffie-Hellmann Key Exchange
* Copyright (C) 2009 Andrej Stepanchuk
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
/* from RFC 3526, see http://www.ietf.org/rfc/rfc3526.txt */
/* 2^768 - 2 ^704 - 1 + 2^64 * { [2^638 pi] + 149686 } */
#define P768 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A63A3620FFFFFFFFFFFFFFFF"
/* 2^1024 - 2^960 - 1 + 2^64 * { [2^894 pi] + 129093 } */
#define P1024 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381" \
"FFFFFFFFFFFFFFFF"
/* Largest prime factor of the group order: */
#define Q1024 \
"7FFFFFFFFFFFFFFFE487ED5110B4611A62633145C06E0E68" \
"948127044533E63A0105DF531D89CD9128A5043CC71A026E" \
"F7CA8CD9E69D218D98158536F92F8A1BA7F09AB6B6A8E122" \
"F242DABB312F3F637A262174D31BF6B585FFAE5B7A035BF6" \
"F71C35FDAD44CFD2D74F9208BE258FF324943328F67329C0" \
"FFFFFFFFFFFFFFFF"
/* 2^1536 - 2^1472 - 1 + 2^64 * { [2^1406 pi] + 741804 } */
#define P1536 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA237327FFFFFFFFFFFFFFFF"
/* 2^2048 - 2^1984 - 1 + 2^64 * { [2^1918 pi] + 124476 } */
#define P2048 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AACAA68FFFFFFFFFFFFFFFF"
/* 2^3072 - 2^3008 - 1 + 2^64 * { [2^2942 pi] + 1690314 } */
#define P3072 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64" \
"ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7" \
"ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6B" \
"F12FFA06D98A0864D87602733EC86A64521F2B18177B200C" \
"BBE117577A615D6C770988C0BAD946E208E24FA074E5AB31" \
"43DB5BFCE0FD108E4B82D120A93AD2CAFFFFFFFFFFFFFFFF"
/* 2^4096 - 2^4032 - 1 + 2^64 * { [2^3966 pi] + 240904 } */
#define P4096 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64" \
"ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7" \
"ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6B" \
"F12FFA06D98A0864D87602733EC86A64521F2B18177B200C" \
"BBE117577A615D6C770988C0BAD946E208E24FA074E5AB31" \
"43DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D7" \
"88719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA" \
"2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6" \
"287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED" \
"1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA9" \
"93B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934063199" \
"FFFFFFFFFFFFFFFF"
/* 2^6144 - 2^6080 - 1 + 2^64 * { [2^6014 pi] + 929484 } */
#define P6144 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64" \
"ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7" \
"ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6B" \
"F12FFA06D98A0864D87602733EC86A64521F2B18177B200C" \
"BBE117577A615D6C770988C0BAD946E208E24FA074E5AB31" \
"43DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D7" \
"88719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA" \
"2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6" \
"287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED" \
"1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA9" \
"93B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934028492" \
"36C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BD" \
"F8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831" \
"179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1B" \
"DB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF" \
"5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6" \
"D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F3" \
"23A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AA" \
"CC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE328" \
"06A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55C" \
"DA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE" \
"12BF2D5B0B7474D6E694F91E6DCC4024FFFFFFFFFFFFFFFF"
/* 2^8192 - 2^8128 - 1 + 2^64 * { [2^8062 pi] + 4743158 } */
#define P8192 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64" \
"ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7" \
"ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6B" \
"F12FFA06D98A0864D87602733EC86A64521F2B18177B200C" \
"BBE117577A615D6C770988C0BAD946E208E24FA074E5AB31" \
"43DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D7" \
"88719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA" \
"2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6" \
"287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED" \
"1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA9" \
"93B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934028492" \
"36C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BD" \
"F8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831" \
"179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1B" \
"DB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF" \
"5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6" \
"D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F3" \
"23A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AA" \
"CC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE328" \
"06A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55C" \
"DA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE" \
"12BF2D5B0B7474D6E694F91E6DBE115974A3926F12FEE5E4" \
"38777CB6A932DF8CD8BEC4D073B931BA3BC832B68D9DD300" \
"741FA7BF8AFC47ED2576F6936BA424663AAB639C5AE4F568" \
"3423B4742BF1C978238F16CBE39D652DE3FDB8BEFC848AD9" \
"22222E04A4037C0713EB57A81A23F0C73473FC646CEA306B" \
"4BCBC8862F8385DDFA9D4B7FA2C087E879683303ED5BDD3A" \
"062B3CF5B3A278A66D2A13F83F44F82DDF310EE074AB6A36" \
"4597E899A0255DC164F31CC50846851DF9AB48195DED7EA1" \
"B1D510BD7EE74D73FAF36BC31ECFA268359046F4EB879F92" \
"4009438B481C6CD7889A002ED5EE382BC9190DA6FC026E47" \
"9558E4475677E9AA9E3050E2765694DFC81F56E880B96E71" \
"60C980DD98EDD3DFFFFFFFFFFFFFFFFF"

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,47 @@
#ifndef __RTMP_HTTP_H__
#define __RTMP_HTTP_H__
/*
* Copyright (C) 2010 Howard Chu
* Copyright (C) 2010 Antti Ajanki
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
/* Outcome of an HTTP transfer performed by HTTP_get(). */
typedef enum {
HTTPRES_OK, /* result OK */
HTTPRES_OK_NOT_MODIFIED, /* not modified since last request */
HTTPRES_NOT_FOUND, /* not found */
HTTPRES_BAD_REQUEST, /* client error */
HTTPRES_SERVER_ERROR, /* server reported an error */
HTTPRES_REDIRECTED, /* resource has been moved */
HTTPRES_LOST_CONNECTION /* connection lost while waiting for data */
} HTTPResult;
/* Per-request state shared between the caller and HTTP_get(). */
struct HTTP_ctx {
char *date; /* NOTE(review): presumably the Date response header buffer -- confirm against http.c */
int size; /* NOTE(review): presumably bytes received -- confirm against http.c */
int status; /* HTTP status code from the response -- TODO confirm */
void *data; /* opaque pointer handed through to the read callback -- TODO confirm */
};
/* fread()-style sink invoked for each chunk of response body */
typedef size_t (HTTP_read_callback)(void *ptr, size_t size, size_t nmemb, void *stream);
HTTPResult HTTP_get(struct HTTP_ctx *http, const char *url, HTTP_read_callback *cb);
#endif

View File

@@ -0,0 +1,69 @@
/*
* Copyright (C) 2008-2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#ifndef __RTMP_LOG_H__
#define __RTMP_LOG_H__
#include <stdio.h>
#include <stdarg.h>
#include <stdint.h>
#ifdef __cplusplus
extern "C" {
#endif
/* Enable this to get full debugging output */
/* #define _DEBUG */
#ifdef _DEBUG
#undef NODEBUG
#endif
/* Severity levels in increasing verbosity; RTMP_LOGALL enables everything. */
typedef enum
{ RTMP_LOGCRIT=0, RTMP_LOGERROR, RTMP_LOGWARNING, RTMP_LOGINFO,
RTMP_LOGDEBUG, RTMP_LOGDEBUG2, RTMP_LOGALL
} RTMP_LogLevel;
/* Global log threshold, adjustable via RTMP_LogSetLevel(). */
extern RTMP_LogLevel RTMP_debuglevel;
/* Custom log sink: receives the level plus a printf-style format/args. */
typedef void (RTMP_LogCallback)(int level, const char *fmt, va_list);
/* Redirect all logging to a callback, or to a FILE stream. */
void RTMP_LogSetCallback(RTMP_LogCallback *cb);
void RTMP_LogSetOutput(FILE *file);
#ifdef __GNUC__
/* with GCC/Clang, enable compile-time printf-format checking */
void RTMP_LogPrintf(const char *format, ...) __attribute__ ((__format__ (__printf__, 1, 2)));
void RTMP_LogStatus(const char *format, ...) __attribute__ ((__format__ (__printf__, 1, 2)));
void RTMP_Log(int level, const char *format, ...) __attribute__ ((__format__ (__printf__, 2, 3)));
#else
void RTMP_LogPrintf(const char *format, ...);
void RTMP_LogStatus(const char *format, ...);
void RTMP_Log(int level, const char *format, ...);
#endif
/* Hex dumps of binary data at the given log level. */
void RTMP_LogHex(int level, const uint8_t *data, unsigned long len);
void RTMP_LogHexString(int level, const uint8_t *data, unsigned long len);
void RTMP_LogSetLevel(RTMP_LogLevel lvl);
RTMP_LogLevel RTMP_LogGetLevel(void);
#ifdef __cplusplus
}
#endif
#endif

View File

@@ -0,0 +1,378 @@
#ifndef __RTMP_H__
#define __RTMP_H__
/*
* Copyright (C) 2005-2008 Team XBMC
* http://www.xbmc.org
* Copyright (C) 2008-2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#if !defined(NO_CRYPTO) && !defined(CRYPTO)
#define CRYPTO
#endif
#include <errno.h>
#include <stdint.h>
#include <stddef.h>
#include "amf.h"
#ifdef __cplusplus
extern "C"
{
#endif
#define RTMP_LIB_VERSION 0x020300 /* 2.3 */
#define RTMP_FEATURE_HTTP 0x01
#define RTMP_FEATURE_ENC 0x02
#define RTMP_FEATURE_SSL 0x04
#define RTMP_FEATURE_MFP 0x08 /* not yet supported */
#define RTMP_FEATURE_WRITE 0x10 /* publish, not play */
#define RTMP_FEATURE_HTTP2 0x20 /* server-side rtmpt */
#define RTMP_PROTOCOL_UNDEFINED -1
#define RTMP_PROTOCOL_RTMP 0
#define RTMP_PROTOCOL_RTMPE RTMP_FEATURE_ENC
#define RTMP_PROTOCOL_RTMPT RTMP_FEATURE_HTTP
#define RTMP_PROTOCOL_RTMPS RTMP_FEATURE_SSL
#define RTMP_PROTOCOL_RTMPTE (RTMP_FEATURE_HTTP|RTMP_FEATURE_ENC)
#define RTMP_PROTOCOL_RTMPTS (RTMP_FEATURE_HTTP|RTMP_FEATURE_SSL)
#define RTMP_PROTOCOL_RTMFP RTMP_FEATURE_MFP
#define RTMP_DEFAULT_CHUNKSIZE 128
/* needs to fit largest number of bytes recv() may return */
#define RTMP_BUFFER_CACHE_SIZE (16*1024)
#define RTMP_CHANNELS 65600
extern const char RTMPProtocolStringsLower[][7];
extern const AVal RTMP_DefaultFlashVer;
extern int RTMP_ctrlC;
uint32_t RTMP_GetTime(void);
/* RTMP_PACKET_TYPE_... 0x00 */
#define RTMP_PACKET_TYPE_CHUNK_SIZE 0x01
/* RTMP_PACKET_TYPE_... 0x02 */
#define RTMP_PACKET_TYPE_BYTES_READ_REPORT 0x03
#define RTMP_PACKET_TYPE_CONTROL 0x04
#define RTMP_PACKET_TYPE_SERVER_BW 0x05
#define RTMP_PACKET_TYPE_CLIENT_BW 0x06
/* RTMP_PACKET_TYPE_... 0x07 */
#define RTMP_PACKET_TYPE_AUDIO 0x08
#define RTMP_PACKET_TYPE_VIDEO 0x09
/* RTMP_PACKET_TYPE_... 0x0A */
/* RTMP_PACKET_TYPE_... 0x0B */
/* RTMP_PACKET_TYPE_... 0x0C */
/* RTMP_PACKET_TYPE_... 0x0D */
/* RTMP_PACKET_TYPE_... 0x0E */
#define RTMP_PACKET_TYPE_FLEX_STREAM_SEND 0x0F
#define RTMP_PACKET_TYPE_FLEX_SHARED_OBJECT 0x10
#define RTMP_PACKET_TYPE_FLEX_MESSAGE 0x11
#define RTMP_PACKET_TYPE_INFO 0x12
#define RTMP_PACKET_TYPE_SHARED_OBJECT 0x13
#define RTMP_PACKET_TYPE_INVOKE 0x14
/* RTMP_PACKET_TYPE_... 0x15 */
#define RTMP_PACKET_TYPE_FLASH_VIDEO 0x16
#define RTMP_MAX_HEADER_SIZE 18
#define RTMP_PACKET_SIZE_LARGE 0
#define RTMP_PACKET_SIZE_MEDIUM 1
#define RTMP_PACKET_SIZE_SMALL 2
#define RTMP_PACKET_SIZE_MINIMUM 3
typedef struct RTMPChunk
{
int c_headerSize;
int c_chunkSize;
char *c_chunk;
char c_header[RTMP_MAX_HEADER_SIZE];
} RTMPChunk;
/* One RTMP message, possibly reassembled from several wire chunks.
 * A packet is complete when m_nBytesRead == m_nBodySize
 * (see RTMPPacket_IsReady). */
typedef struct RTMPPacket
{
uint8_t m_headerType; /* header size class, RTMP_PACKET_SIZE_* -- TODO confirm */
uint8_t m_packetType; /* message type id, one of RTMP_PACKET_TYPE_* */
uint8_t m_hasAbsTimestamp; /* timestamp absolute or relative? */
int m_nChannel; /* chunk-stream (channel) number -- TODO confirm */
uint32_t m_nTimeStamp; /* timestamp */
int32_t m_nInfoField2; /* last 4 bytes in a long header */
uint32_t m_nBodySize; /* body size announced by the header */
uint32_t m_nBytesRead; /* body bytes received so far */
RTMPChunk *m_chunk; /* NOTE(review): presumably raw chunk capture for callers wanting the wire form -- confirm */
char *m_body; /* message payload */
} RTMPPacket;
/* Buffered socket reader: sb_buf caches bytes from the socket, and
 * sb_start/sb_size track the unconsumed window inside it. */
typedef struct RTMPSockBuf
{
int sb_socket; /* underlying socket descriptor */
int sb_size; /* number of unprocessed bytes in buffer */
char *sb_start; /* pointer into sb_pBuffer of next byte to process */
char sb_buf[RTMP_BUFFER_CACHE_SIZE]; /* data read from socket */
int sb_timedout; /* NOTE(review): presumably set when a read times out (cf. RTMP_IsTimedout) -- confirm */
void *sb_ssl; /* TLS session handle when encrypted, else NULL -- TODO confirm */
} RTMPSockBuf;
void RTMPPacket_Reset(RTMPPacket *p);
void RTMPPacket_Dump(RTMPPacket *p);
int RTMPPacket_Alloc(RTMPPacket *p, uint32_t nSize);
void RTMPPacket_Free(RTMPPacket *p);
#define RTMPPacket_IsReady(a) ((a)->m_nBytesRead == (a)->m_nBodySize)
typedef struct RTMP_LNK
{
AVal hostname;
AVal sockshost;
AVal playpath0; /* parsed from URL */
AVal playpath; /* passed in explicitly */
AVal tcUrl;
AVal swfUrl;
AVal pageUrl;
AVal app;
AVal auth;
AVal flashVer;
AVal subscribepath;
AVal usherToken;
AVal token;
AVal pubUser;
AVal pubPasswd;
AMFObject extras;
int edepth;
int seekTime;
int stopTime;
#define RTMP_LF_AUTH 0x0001 /* using auth param */
#define RTMP_LF_LIVE 0x0002 /* stream is live */
#define RTMP_LF_SWFV 0x0004 /* do SWF verification */
#define RTMP_LF_PLST 0x0008 /* send playlist before play */
#define RTMP_LF_BUFX 0x0010 /* toggle stream on BufferEmpty msg */
#define RTMP_LF_FTCU 0x0020 /* free tcUrl on close */
#define RTMP_LF_FAPU 0x0040 /* free app on close */
int lFlags;
int swfAge;
int protocol;
int timeout; /* connection timeout in seconds */
int pFlags; /* unused, but kept to avoid breaking ABI */
unsigned short socksport;
unsigned short port;
#ifdef CRYPTO
#define RTMP_SWF_HASHLEN 32
void *dh; /* for encryption */
void *rc4keyIn;
void *rc4keyOut;
uint32_t SWFSize;
uint8_t SWFHash[RTMP_SWF_HASHLEN];
char SWFVerificationResponse[RTMP_SWF_HASHLEN+10];
#endif
} RTMP_LNK;
/* state for read() wrapper */
typedef struct RTMP_READ
{
char *buf;
char *bufpos;
unsigned int buflen;
uint32_t timestamp;
uint8_t dataType;
uint8_t flags;
#define RTMP_READ_HEADER 0x01
#define RTMP_READ_RESUME 0x02
#define RTMP_READ_NO_IGNORE 0x04
#define RTMP_READ_GOTKF 0x08
#define RTMP_READ_GOTFLVK 0x10
#define RTMP_READ_SEEKING 0x20
int8_t status;
#define RTMP_READ_COMPLETE -3
#define RTMP_READ_ERROR -2
#define RTMP_READ_EOF -1
#define RTMP_READ_IGNORE 0
/* if bResume == TRUE */
uint8_t initialFrameType;
uint32_t nResumeTS;
char *metaHeader;
char *initialFrame;
uint32_t nMetaHeaderSize;
uint32_t nInitialFrameSize;
uint32_t nIgnoredFrameCounter;
uint32_t nIgnoredFlvFrameCounter;
} RTMP_READ;
/* Entry in the queue of invoked-but-unanswered remote methods
 * (see RTMP.m_methodCalls). */
typedef struct RTMP_METHOD
{
AVal name; /* method name as sent in the invoke */
int num; /* NOTE(review): presumably the invoke transaction id -- confirm */
} RTMP_METHOD;
typedef struct RTMP
{
int m_inChunkSize;
int m_outChunkSize;
int m_nBWCheckCounter;
int m_nBytesIn;
int m_nBytesInSent;
int m_nBufferMS;
int m_stream_id; /* returned in _result from createStream */
int m_mediaChannel;
uint32_t m_mediaStamp;
uint32_t m_pauseStamp;
int m_pausing;
int m_nServerBW;
int m_nClientBW;
uint8_t m_nClientBW2;
uint8_t m_bPlaying;
uint8_t m_bSendEncoding;
uint8_t m_bSendCounter;
int m_numInvokes;
int m_numCalls;
RTMP_METHOD *m_methodCalls; /* remote method calls queue */
int m_channelsAllocatedIn;
int m_channelsAllocatedOut;
RTMPPacket **m_vecChannelsIn;
RTMPPacket **m_vecChannelsOut;
int *m_channelTimestamp; /* abs timestamp of last packet */
double m_fAudioCodecs; /* audioCodecs for the connect packet */
double m_fVideoCodecs; /* videoCodecs for the connect packet */
double m_fEncoding; /* AMF0 or AMF3 */
double m_fDuration; /* duration of stream in seconds */
int m_msgCounter; /* RTMPT stuff */
int m_polling;
int m_resplen;
int m_unackd;
AVal m_clientID;
RTMP_READ m_read;
RTMPPacket m_write;
RTMPSockBuf m_sb;
RTMP_LNK Link;
} RTMP;
int RTMP_ParseURL(const char *url, int *protocol, AVal *host,
unsigned int *port, AVal *playpath, AVal *app);
void RTMP_ParsePlaypath(AVal *in, AVal *out);
void RTMP_SetBufferMS(RTMP *r, int size);
void RTMP_UpdateBufferMS(RTMP *r);
int RTMP_SetOpt(RTMP *r, const AVal *opt, AVal *arg);
int RTMP_SetupURL(RTMP *r, char *url);
void RTMP_SetupStream(RTMP *r, int protocol,
AVal *hostname,
unsigned int port,
AVal *sockshost,
AVal *playpath,
AVal *tcUrl,
AVal *swfUrl,
AVal *pageUrl,
AVal *app,
AVal *auth,
AVal *swfSHA256Hash,
uint32_t swfSize,
AVal *flashVer,
AVal *subscribepath,
AVal *usherToken,
int dStart,
int dStop, int bLiveStream, long int timeout);
int RTMP_Connect(RTMP *r, RTMPPacket *cp);
struct sockaddr;
int RTMP_Connect0(RTMP *r, struct sockaddr *svc);
int RTMP_Connect1(RTMP *r, RTMPPacket *cp);
int RTMP_Serve(RTMP *r);
int RTMP_TLS_Accept(RTMP *r, void *ctx);
int RTMP_ReadPacket(RTMP *r, RTMPPacket *packet);
int RTMP_SendPacket(RTMP *r, RTMPPacket *packet, int queue);
int RTMP_SendChunk(RTMP *r, RTMPChunk *chunk);
int RTMP_IsConnected(RTMP *r);
int RTMP_Socket(RTMP *r);
int RTMP_IsTimedout(RTMP *r);
double RTMP_GetDuration(RTMP *r);
int RTMP_ToggleStream(RTMP *r);
int RTMP_ConnectStream(RTMP *r, int seekTime);
int RTMP_ReconnectStream(RTMP *r, int seekTime);
void RTMP_DeleteStream(RTMP *r);
int RTMP_GetNextMediaPacket(RTMP *r, RTMPPacket *packet);
int RTMP_ClientPacket(RTMP *r, RTMPPacket *packet);
void RTMP_Init(RTMP *r);
void RTMP_Close(RTMP *r);
RTMP *RTMP_Alloc(void);
void RTMP_Free(RTMP *r);
void RTMP_EnableWrite(RTMP *r);
void *RTMP_TLS_AllocServerContext(const char* cert, const char* key);
void RTMP_TLS_FreeServerContext(void *ctx);
int RTMP_LibVersion(void);
void RTMP_UserInterrupt(void); /* user typed Ctrl-C */
int RTMP_SendCtrl(RTMP *r, short nType, unsigned int nObject,
unsigned int nTime);
/* caller probably doesn't know current timestamp, should
* just use RTMP_Pause instead
*/
int RTMP_SendPause(RTMP *r, int DoPause, int dTime);
int RTMP_Pause(RTMP *r, int DoPause);
int RTMP_FindFirstMatchingProperty(AMFObject *obj, const AVal *name,
AMFObjectProperty * p);
int RTMPSockBuf_Fill(RTMPSockBuf *sb);
int RTMPSockBuf_Send(RTMPSockBuf *sb, const char *buf, int len);
int RTMPSockBuf_Close(RTMPSockBuf *sb);
int RTMP_SendCreateStream(RTMP *r);
int RTMP_SendSeek(RTMP *r, int dTime);
int RTMP_SendServerBW(RTMP *r);
int RTMP_SendClientBW(RTMP *r);
void RTMP_DropRequest(RTMP *r, int i, int freeit);
int RTMP_Read(RTMP *r, char *buf, int size);
int RTMP_Write(RTMP *r, const char *buf, int size);
/* hashswf.c */
int RTMP_HashSWF(const char *url, unsigned int *size, unsigned char *hash,
int age);
#ifdef __cplusplus
};
#endif
#endif

View File

@@ -0,0 +1,141 @@
#ifndef __RTMP_SYS_H__
#define __RTMP_SYS_H__
/*
* Copyright (C) 2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#ifdef _WIN32
#include <winsock2.h>
#include <ws2tcpip.h>
#ifdef _MSC_VER /* MSVC */
#if _MSC_VER < 1900
#define snprintf _snprintf
#define vsnprintf _vsnprintf
#endif
#define strcasecmp _stricmp
#define strncasecmp _strnicmp
#endif
#define GetSockError() WSAGetLastError()
#define SetSockError(e) WSASetLastError(e)
#define setsockopt(a,b,c,d,e) (setsockopt)(a,b,c,(const char *)d,(int)e)
#define EWOULDBLOCK WSAETIMEDOUT /* we don't use nonblocking, but we do use timeouts */
#define sleep(n) Sleep(n*1000)
#define msleep(n) Sleep(n)
#define SET_RCVTIMEO(tv,s) int tv = s*1000
#else /* !_WIN32 */
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/times.h>
#include <netdb.h>
#include <unistd.h>
#include <netinet/in.h>
#include <netinet/tcp.h>
#include <arpa/inet.h>
#define GetSockError() errno
#define SetSockError(e) errno = e
#undef closesocket
#define closesocket(s) close(s)
#define msleep(n) usleep(n*1000)
#define SET_RCVTIMEO(tv,s) struct timeval tv = {s,0}
#endif
#include "rtmp.h"
#ifdef USE_POLARSSL
#include <polarssl/version.h>
#include <polarssl/net.h>
#include <polarssl/ssl.h>
#include <polarssl/havege.h>
#if POLARSSL_VERSION_NUMBER < 0x01010000
#define havege_random havege_rand
#endif
#if POLARSSL_VERSION_NUMBER >= 0x01020000
#define SSL_SET_SESSION(S,resume,timeout,ctx) ssl_set_session(S,ctx)
#else
#define SSL_SET_SESSION(S,resume,timeout,ctx) ssl_set_session(S,resume,timeout,ctx)
#endif
typedef struct tls_ctx {
havege_state hs;
ssl_session ssn;
} tls_ctx;
typedef struct tls_server_ctx {
havege_state *hs;
x509_cert cert;
rsa_context key;
ssl_session ssn;
const char *dhm_P, *dhm_G;
} tls_server_ctx;
#define TLS_CTX tls_ctx *
#define TLS_client(ctx,s) s = malloc(sizeof(ssl_context)); ssl_init(s);\
ssl_set_endpoint(s, SSL_IS_CLIENT); ssl_set_authmode(s, SSL_VERIFY_NONE);\
ssl_set_rng(s, havege_random, &ctx->hs);\
ssl_set_ciphersuites(s, ssl_default_ciphersuites);\
SSL_SET_SESSION(s, 1, 600, &ctx->ssn)
#define TLS_server(ctx,s) s = malloc(sizeof(ssl_context)); ssl_init(s);\
ssl_set_endpoint(s, SSL_IS_SERVER); ssl_set_authmode(s, SSL_VERIFY_NONE);\
ssl_set_rng(s, havege_random, ((tls_server_ctx*)ctx)->hs);\
ssl_set_ciphersuites(s, ssl_default_ciphersuites);\
SSL_SET_SESSION(s, 1, 600, &((tls_server_ctx*)ctx)->ssn);\
ssl_set_own_cert(s, &((tls_server_ctx*)ctx)->cert, &((tls_server_ctx*)ctx)->key);\
ssl_set_dh_param(s, ((tls_server_ctx*)ctx)->dhm_P, ((tls_server_ctx*)ctx)->dhm_G)
#define TLS_setfd(s,fd) ssl_set_bio(s, net_recv, &fd, net_send, &fd)
#define TLS_connect(s) ssl_handshake(s)
#define TLS_accept(s) ssl_handshake(s)
#define TLS_read(s,b,l) ssl_read(s,(unsigned char *)b,l)
#define TLS_write(s,b,l) ssl_write(s,(unsigned char *)b,l)
#define TLS_shutdown(s) ssl_close_notify(s)
#define TLS_close(s) ssl_free(s); free(s)
#elif defined(USE_GNUTLS)
#include <gnutls/gnutls.h>
typedef struct tls_ctx {
gnutls_certificate_credentials_t cred;
gnutls_priority_t prios;
} tls_ctx;
#define TLS_CTX tls_ctx *
#define TLS_client(ctx,s) gnutls_init((gnutls_session_t *)(&s), GNUTLS_CLIENT); gnutls_priority_set(s, ctx->prios); gnutls_credentials_set(s, GNUTLS_CRD_CERTIFICATE, ctx->cred)
#define TLS_server(ctx,s) gnutls_init((gnutls_session_t *)(&s), GNUTLS_SERVER); gnutls_priority_set_direct(s, "NORMAL", NULL); gnutls_credentials_set(s, GNUTLS_CRD_CERTIFICATE, ctx)
#define TLS_setfd(s,fd) gnutls_transport_set_ptr(s, (gnutls_transport_ptr_t)(long)fd)
#define TLS_connect(s) gnutls_handshake(s)
#define TLS_accept(s) gnutls_handshake(s)
#define TLS_read(s,b,l) gnutls_record_recv(s,b,l)
#define TLS_write(s,b,l) gnutls_record_send(s,b,l)
#define TLS_shutdown(s) gnutls_bye(s, GNUTLS_SHUT_RDWR)
#define TLS_close(s) gnutls_deinit(s)
#else /* USE_OPENSSL */
#define TLS_CTX SSL_CTX *
#define TLS_client(ctx,s) s = SSL_new(ctx)
#define TLS_server(ctx,s) s = SSL_new(ctx)
#define TLS_setfd(s,fd) SSL_set_fd(s,fd)
#define TLS_connect(s) SSL_connect(s)
#define TLS_accept(s) SSL_accept(s)
#define TLS_read(s,b,l) SSL_read(s,b,l)
#define TLS_write(s,b,l) SSL_write(s,b,l)
#define TLS_shutdown(s) SSL_shutdown(s)
#define TLS_close(s) SSL_free(s)
#endif
#endif

View File

@@ -16,7 +16,7 @@ import com.omixlab.lckcontrol.data.local.entity.StreamPlanEntity
StreamPlanEntity::class, StreamPlanEntity::class,
StreamDestinationEntity::class, StreamDestinationEntity::class,
], ],
version = 3, version = 5,
exportSchema = false, exportSchema = false,
) )
abstract class LckDatabase : RoomDatabase() { abstract class LckDatabase : RoomDatabase() {
@@ -96,5 +96,18 @@ abstract class LckDatabase : RoomDatabase() {
db.execSQL("ALTER TABLE stream_destinations ADD COLUMN linkedAccountId TEXT NOT NULL DEFAULT ''") db.execSQL("ALTER TABLE stream_destinations ADD COLUMN linkedAccountId TEXT NOT NULL DEFAULT ''")
} }
} }
val MIGRATION_3_4 = object : Migration(3, 4) {
override fun migrate(db: SupportSQLiteDatabase) {
db.execSQL("ALTER TABLE stream_plans ADD COLUMN executionMode TEXT NOT NULL DEFAULT 'IN_GAME'")
}
}
val MIGRATION_4_5 = object : Migration(4, 5) {
override fun migrate(db: SupportSQLiteDatabase) {
db.execSQL("ALTER TABLE linked_accounts ADD COLUMN isEnabled INTEGER NOT NULL DEFAULT 1")
db.execSQL("ALTER TABLE stream_plans ADD COLUMN gameId TEXT NOT NULL DEFAULT ''")
}
}
} }
} }

View File

@@ -33,4 +33,7 @@ interface LinkedAccountDao {
@Query("DELETE FROM linked_accounts WHERE serviceId = :serviceId") @Query("DELETE FROM linked_accounts WHERE serviceId = :serviceId")
suspend fun deleteByService(serviceId: String) suspend fun deleteByService(serviceId: String)
@Query("UPDATE linked_accounts SET isEnabled = :isEnabled WHERE id = :id")
suspend fun setEnabled(id: String, isEnabled: Boolean)
} }

View File

@@ -10,4 +10,5 @@ data class LinkedAccountEntity(
val displayName: String, val displayName: String,
val accountId: String, val accountId: String,
val avatarUrl: String? = null, val avatarUrl: String? = null,
val isEnabled: Boolean = true,
) )

View File

@@ -8,5 +8,7 @@ data class StreamPlanEntity(
@PrimaryKey val planId: String, @PrimaryKey val planId: String,
val name: String, val name: String,
val status: String = "DRAFT", val status: String = "DRAFT",
val executionMode: String = "IN_GAME",
val gameId: String = "",
val createdAt: Long = System.currentTimeMillis(), val createdAt: Long = System.currentTimeMillis(),
) )

View File

@@ -67,6 +67,8 @@ data class LinkedAccountResponse(
@JsonClass(generateAdapter = true) @JsonClass(generateAdapter = true)
data class CreateStreamPlanRequest( data class CreateStreamPlanRequest(
val name: String, val name: String,
val executionMode: String? = null,
val gameId: String? = null,
val destinations: List<CreateDestinationRequest>, val destinations: List<CreateDestinationRequest>,
) )
@@ -85,6 +87,8 @@ data class StreamPlanResponse(
val id: String, val id: String,
val name: String, val name: String,
val status: String, val status: String,
val executionMode: String? = null,
val gameId: String? = null,
val createdAt: String, val createdAt: String,
val updatedAt: String, val updatedAt: String,
val destinations: List<StreamDestinationResponse>, val destinations: List<StreamDestinationResponse>,

View File

@@ -26,6 +26,8 @@ class AccountRepository @Inject constructor(
/** Fetch accounts from backend and sync to Room cache */ /** Fetch accounts from backend and sync to Room cache */
suspend fun syncAccounts() { suspend fun syncAccounts() {
val remote = apiService.getLinkedAccounts() val remote = apiService.getLinkedAccounts()
// Read local entities to preserve isEnabled across sync
val localMap = accountDao.getAll().associateBy { it.id }
val entities = remote.map { account -> val entities = remote.map { account ->
LinkedAccountEntity( LinkedAccountEntity(
id = account.id, id = account.id,
@@ -33,12 +35,12 @@ class AccountRepository @Inject constructor(
displayName = account.displayName, displayName = account.displayName,
accountId = account.accountId, accountId = account.accountId,
avatarUrl = account.avatarUrl, avatarUrl = account.avatarUrl,
isEnabled = localMap[account.id]?.isEnabled ?: true,
) )
} }
// Get current local accounts to detect removals // Detect removals
val local = accountDao.getAll()
val remoteIds = entities.map { it.id }.toSet() val remoteIds = entities.map { it.id }.toSet()
for (localAccount in local) { for (localAccount in localMap.values) {
if (localAccount.id !in remoteIds) { if (localAccount.id !in remoteIds) {
accountDao.deleteById(localAccount.id) accountDao.deleteById(localAccount.id)
} }
@@ -48,6 +50,10 @@ class AccountRepository @Inject constructor(
} }
} }
suspend fun setAccountEnabled(id: String, enabled: Boolean) {
accountDao.setEnabled(id, enabled)
}
/** Get YouTube OAuth URL from backend (for Custom Tabs) */ /** Get YouTube OAuth URL from backend (for Custom Tabs) */
suspend fun getYouTubeAuthUrl(): String { suspend fun getYouTubeAuthUrl(): String {
val response = apiService.getYouTubeAuthUrl() val response = apiService.getYouTubeAuthUrl()
@@ -85,5 +91,6 @@ class AccountRepository @Inject constructor(
accountId = accountId, accountId = accountId,
avatarUrl = avatarUrl, avatarUrl = avatarUrl,
isAuthenticated = true, // Backend manages auth state isAuthenticated = true, // Backend manages auth state
isEnabled = isEnabled,
) )
} }

View File

@@ -42,9 +42,16 @@ class StreamPlanRepository @Inject constructor(
} }
/** Create plan via backend and cache locally */ /** Create plan via backend and cache locally */
suspend fun createPlan(name: String, destinations: List<StreamDestination>): StreamPlan { suspend fun createPlan(
name: String,
destinations: List<StreamDestination>,
executionMode: String = "IN_GAME",
gameId: String = "",
): StreamPlan {
val request = CreateStreamPlanRequest( val request = CreateStreamPlanRequest(
name = name, name = name,
executionMode = executionMode,
gameId = gameId.ifBlank { null },
destinations = destinations.map { dest -> destinations = destinations.map { dest ->
CreateDestinationRequest( CreateDestinationRequest(
linkedAccountId = dest.linkedAccountId, linkedAccountId = dest.linkedAccountId,
@@ -96,7 +103,13 @@ class StreamPlanRepository @Inject constructor(
} }
private suspend fun cacheRemotePlan(remote: StreamPlanResponse) { private suspend fun cacheRemotePlan(remote: StreamPlanResponse) {
val planEntity = StreamPlanEntity(planId = remote.id, name = remote.name, status = remote.status) val planEntity = StreamPlanEntity(
planId = remote.id,
name = remote.name,
status = remote.status,
executionMode = remote.executionMode ?: "IN_GAME",
gameId = remote.gameId ?: "",
)
val destEntities = remote.destinations.map { d -> val destEntities = remote.destinations.map { d ->
StreamDestinationEntity( StreamDestinationEntity(
id = d.id, id = d.id,
@@ -121,6 +134,8 @@ class StreamPlanRepository @Inject constructor(
planId = plan.planId, planId = plan.planId,
name = plan.name, name = plan.name,
status = plan.status, status = plan.status,
executionMode = plan.executionMode,
gameId = plan.gameId,
destinations = destinations.map { it.toStreamDestination() }, destinations = destinations.map { it.toStreamDestination() },
) )

View File

@@ -20,7 +20,7 @@ object DatabaseModule {
@Singleton @Singleton
fun provideDatabase(@ApplicationContext context: Context): LckDatabase = fun provideDatabase(@ApplicationContext context: Context): LckDatabase =
Room.databaseBuilder(context, LckDatabase::class.java, "lck_control.db") Room.databaseBuilder(context, LckDatabase::class.java, "lck_control.db")
.addMigrations(LckDatabase.MIGRATION_1_2, LckDatabase.MIGRATION_2_3) .addMigrations(LckDatabase.MIGRATION_1_2, LckDatabase.MIGRATION_2_3, LckDatabase.MIGRATION_3_4, LckDatabase.MIGRATION_4_5)
.build() .build()
@Provides @Provides

View File

@@ -26,6 +26,9 @@ import com.omixlab.lckcontrol.shared.LinkedAccount
import com.omixlab.lckcontrol.shared.StreamDestination import com.omixlab.lckcontrol.shared.StreamDestination
import com.omixlab.lckcontrol.shared.StreamPlan import com.omixlab.lckcontrol.shared.StreamPlan
import com.omixlab.lckcontrol.shared.StreamPlanConfig import com.omixlab.lckcontrol.shared.StreamPlanConfig
import com.omixlab.lckcontrol.shared.StreamingConfig
import com.omixlab.lckcontrol.streaming.StreamingManager
import com.omixlab.lckcontrol.streaming.StreamingState
import dagger.hilt.android.AndroidEntryPoint import dagger.hilt.android.AndroidEntryPoint
import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.Dispatchers
@@ -48,15 +51,18 @@ class LckControlService : Service() {
private const val NOTIFICATION_ID = 1 private const val NOTIFICATION_ID = 1
private const val QUEST_APP_ID = "25653777174321448" private const val QUEST_APP_ID = "25653777174321448"
private const val TOKEN_REFRESH_INTERVAL_MS = 60_000L private const val TOKEN_REFRESH_INTERVAL_MS = 60_000L
private const val ACTION_BIND_STREAMING = "com.omixlab.lckcontrol.BIND_STREAMING"
} }
@Inject lateinit var accountRepository: AccountRepository @Inject lateinit var accountRepository: AccountRepository
@Inject lateinit var streamPlanRepository: StreamPlanRepository @Inject lateinit var streamPlanRepository: StreamPlanRepository
@Inject lateinit var tokenStore: TokenStore @Inject lateinit var tokenStore: TokenStore
@Inject lateinit var apiService: LckApiService @Inject lateinit var apiService: LckApiService
@Inject lateinit var streamingManager: StreamingManager
private val serviceScope = CoroutineScope(SupervisorJob() + Dispatchers.Main) private val serviceScope = CoroutineScope(SupervisorJob() + Dispatchers.Main)
private val clientTracker = ClientTracker() private val clientTracker = ClientTracker()
private var streamingServiceImpl: LckStreamingServiceImpl? = null
private val callbacks = object : RemoteCallbackList<ILckControlCallback>() { private val callbacks = object : RemoteCallbackList<ILckControlCallback>() {
override fun onCallbackDied(callback: ILckControlCallback, cookie: Any?) { override fun onCallbackDied(callback: ILckControlCallback, cookie: Any?) {
val uid = cookie as? Int ?: return val uid = cookie as? Int ?: return
@@ -95,13 +101,20 @@ class LckControlService : Service() {
// ── Stream plans ──────────────────────────────────── // ── Stream plans ────────────────────────────────────
override fun createStreamPlan(config: StreamPlanConfig): StreamPlan = runBlocking { override fun createStreamPlan(config: StreamPlanConfig): StreamPlan = runBlocking {
val plan = streamPlanRepository.createPlan(config.name, config.destinations) val plan = streamPlanRepository.createPlan(
name = config.name,
destinations = config.destinations,
executionMode = config.executionMode,
gameId = config.gameId,
)
broadcastPlansChanged() broadcastPlansChanged()
plan plan
} }
override fun createDefaultPlan(clientName: String): StreamPlan = runBlocking { override fun createDefaultPlan(clientName: String): StreamPlan = runBlocking {
val accounts = accountRepository.getAccounts() val accounts = accountRepository.getAccounts().filter { it.isEnabled }
val gameId = clientTracker.getAll()
.find { it.clientName == clientName }?.packageName ?: ""
val destinations = accounts.map { account -> val destinations = accounts.map { account ->
StreamDestination( StreamDestination(
service = account.serviceId, service = account.serviceId,
@@ -110,7 +123,11 @@ class LckControlService : Service() {
privacyStatus = "unlisted", privacyStatus = "unlisted",
) )
} }
val plan = streamPlanRepository.createPlan("$clientName Stream", destinations) val plan = streamPlanRepository.createPlan(
name = "$clientName Stream",
destinations = destinations,
gameId = gameId,
)
broadcastPlansChanged() broadcastPlansChanged()
plan plan
} }
@@ -137,6 +154,17 @@ class LckControlService : Service() {
try { try {
streamPlanRepository.startPlan(planId) streamPlanRepository.startPlan(planId)
val updated = streamPlanRepository.getPlan(planId) val updated = streamPlanRepository.getPlan(planId)
// If APP_STREAMING mode, start the streaming engine
if (updated?.executionMode == "APP_STREAMING") {
streamingManager.startStreaming(
plan = updated,
config = StreamingConfig(),
width = 1920,
height = 1080,
)
}
if (updated != null) broadcastPlanUpdated(updated) if (updated != null) broadcastPlanUpdated(updated)
true true
} catch (_: Exception) { false } } catch (_: Exception) { false }
@@ -147,6 +175,11 @@ class LckControlService : Service() {
if (plan.status == "ENDED") return@runBlocking true if (plan.status == "ENDED") return@runBlocking true
if (plan.status != "LIVE" && plan.status != "READY") return@runBlocking false if (plan.status != "LIVE" && plan.status != "READY") return@runBlocking false
try { try {
// Stop streaming engine if running
if (plan.executionMode == "APP_STREAMING") {
streamingManager.stopStreaming()
}
streamPlanRepository.endPlan(planId) streamPlanRepository.endPlan(planId)
val updated = streamPlanRepository.getPlan(planId) val updated = streamPlanRepository.getPlan(planId)
if (updated != null) broadcastPlanUpdated(updated) if (updated != null) broadcastPlanUpdated(updated)
@@ -222,11 +255,37 @@ class LckControlService : Service() {
} }
} }
} }
// Forward streaming state changes to AIDL callbacks
serviceScope.launch {
streamingManager.state.collect { state ->
streamingServiceImpl?.broadcastStateChanged(state)
}
}
serviceScope.launch {
streamingManager.stats.collect { stats ->
streamingServiceImpl?.broadcastStats(
stats.videoBitrate, stats.audioBitrate, stats.fps, stats.droppedFrames,
)
}
}
} }
override fun onBind(intent: Intent?): IBinder = binder override fun onBind(intent: Intent?): IBinder? {
return when (intent?.action) {
ACTION_BIND_STREAMING -> {
if (streamingServiceImpl == null) {
streamingServiceImpl = LckStreamingServiceImpl(streamingManager)
}
streamingServiceImpl!!.asBinder()
}
else -> binder
}
}
override fun onDestroy() { override fun onDestroy() {
streamingManager.stopStreaming()
streamingServiceImpl?.kill()
serviceScope.cancel() serviceScope.cancel()
callbacks.kill() callbacks.kill()
super.onDestroy() super.onDestroy()

View File

@@ -0,0 +1,138 @@
package com.omixlab.lckcontrol.service
import android.hardware.HardwareBuffer
import android.os.ParcelFileDescriptor
import android.os.RemoteCallbackList
import android.util.Log
import com.omixlab.lckcontrol.shared.ILckStreamingCallback
import com.omixlab.lckcontrol.shared.ILckStreamingService
import com.omixlab.lckcontrol.streaming.StreamingManager
import com.omixlab.lckcontrol.streaming.StreamingState
/**
 * AIDL implementation for ILckStreamingService.
 * Bridges AIDL IPC calls to the StreamingManager.
 * Frame submission methods are one-way for non-blocking game render thread.
 *
 * State/stats observation requires a coroutine scope, so the owning
 * LckControlService collects StreamingManager flows and forwards them here
 * through the broadcast* helpers.
 */
class LckStreamingServiceImpl(
    private val streamingManager: StreamingManager,
) : ILckStreamingService.Stub() {

    // Registered remote listeners; RemoteCallbackList unregisters dead binders for us.
    private val callbacks = RemoteCallbackList<ILckStreamingCallback>()

    /**
     * Register the shared texture pool the game renders into.
     *
     * @param buffers zero-copy GPU buffers, referenced by index in later
     *   [submitVideoFrame] calls
     */
    override fun registerTexturePool(
        buffers: Array<HardwareBuffer>,
        width: Int,
        height: Int,
        format: Int,
    ) {
        Log.d(TAG, "registerTexturePool: ${buffers.size} buffers, ${width}x$height")
        streamingManager.registerTexturePool(buffers, width, height, format)
    }

    override fun unregisterTexturePool() {
        Log.d(TAG, "unregisterTexturePool")
        streamingManager.unregisterTexturePool()
    }

    /**
     * Submit one rendered video frame by pool index.
     * The GPU fence fd (if any) is detached here, so ownership passes to the
     * native engine; -1 means "no fence".
     */
    override fun submitVideoFrame(
        bufferIndex: Int,
        timestampNs: Long,
        gpuFence: ParcelFileDescriptor?,
    ) {
        val fenceFd = gpuFence?.detachFd() ?: -1
        streamingManager.submitVideoFrame(bufferIndex, timestampNs, fenceFd)
    }

    /**
     * Submit raw PCM audio.
     * NOTE(review): sampleRate/channels/bitsPerSample are accepted but not
     * forwarded — the manager only takes data+timestamp; confirm the engine's
     * audio format is fixed at create() time.
     */
    override fun submitAudioFrame(
        pcmData: ByteArray,
        timestampNs: Long,
        sampleRate: Int,
        channels: Int,
        bitsPerSample: Int,
    ) {
        streamingManager.submitAudioFrame(pcmData, timestampNs)
    }

    override fun isStreaming(): Boolean = streamingManager.isStreaming()

    override fun registerStreamingCallback(callback: ILckStreamingCallback) {
        callbacks.register(callback)
    }

    override fun unregisterStreamingCallback(callback: ILckStreamingCallback) {
        callbacks.unregister(callback)
    }

    // ── Broadcast helpers (called from LckControlService coroutine scope) ──

    fun broadcastStateChanged(state: StreamingState) {
        broadcast { it.onStreamingStateChanged(state.name) }
    }

    fun broadcastStats(videoBitrate: Long, audioBitrate: Long, fps: Int, droppedFrames: Int) {
        broadcast { it.onStreamingStats(videoBitrate, audioBitrate, fps, droppedFrames) }
    }

    fun broadcastError(code: Int, message: String) {
        broadcast { it.onStreamingError(code, message) }
    }

    fun broadcastBufferReleased(bufferIndex: Int) {
        broadcast { it.onBufferReleased(bufferIndex) }
    }

    /** Release all registered callbacks; call on service teardown. */
    fun kill() {
        callbacks.kill()
    }

    /**
     * Run [action] for every live callback, swallowing per-callback remote
     * failures so one dead client cannot break delivery to the others.
     * Replaces four copies of the beginBroadcast/finishBroadcast loop.
     */
    private inline fun broadcast(action: (ILckStreamingCallback) -> Unit) {
        val count = callbacks.beginBroadcast()
        try {
            for (i in 0 until count) {
                try {
                    action(callbacks.getBroadcastItem(i))
                } catch (_: Exception) {
                }
            }
        } finally {
            callbacks.finishBroadcast()
        }
    }

    companion object {
        private const val TAG = "LckStreamingServiceImpl"
    }
}

View File

@@ -0,0 +1,112 @@
package com.omixlab.lckcontrol.streaming
import android.hardware.HardwareBuffer
import android.util.Log
/**
 * Thin JNI wrapper around the C++ StreamingEngine.
 * All encoding, muxing, and RTMP streaming happens in native code (zero-copy pipeline).
 *
 * Lifecycle: create() -> addDestination()* -> start() -> submit*Frame()* -> stop() -> destroy().
 * [nativePtr] holds the opaque native engine pointer; 0L means "no engine".
 * NOTE(review): nativePtr is read on the frame-submission path and written by
 * create()/destroy() — assumes callers serialize lifecycle against frame
 * submission; confirm the threading contract with the native side.
 */
class NativeStreamingEngine {
    companion object {
        private const val TAG = "NativeStreamingEngine"
        init {
            // Loads liblck_streaming.so once per process, on first use of this class.
            System.loadLibrary("lck_streaming")
        }
    }
    // Opaque pointer to the native engine instance; 0L when not created.
    private var nativePtr: Long = 0
    // Callbacks invoked from native code via the private onNative* bridge methods below.
    var onStats: ((StreamingStats) -> Unit)? = null
    var onError: ((Int, String) -> Unit)? = null
    var onBufferReleased: ((Int) -> Unit)? = null
    /**
     * Create the native engine with the given encoder configuration.
     * If an engine already exists it is destroyed first, so create() is safe
     * to call repeatedly.
     */
    fun create(
        width: Int,
        height: Int,
        videoBitrate: Int,
        audioBitrate: Int,
        sampleRate: Int,
        channels: Int,
        keyframeInterval: Int,
    ) {
        if (nativePtr != 0L) {
            Log.w(TAG, "Engine already created, destroying first")
            destroy()
        }
        nativePtr = nativeCreate(width, height, videoBitrate, audioBitrate,
            sampleRate, channels, keyframeInterval)
    }
    /**
     * Add an RTMP destination URL (stream key already appended by the caller).
     * @return a native destination id
     * @throws IllegalStateException if create() has not been called
     */
    fun addDestination(rtmpUrl: String): Int {
        check(nativePtr != 0L) { "Engine not created" }
        return nativeAddDestination(nativePtr, rtmpUrl)
    }
    /**
     * Start encoding and streaming.
     * @return false when the native engine fails to start
     * @throws IllegalStateException if create() has not been called
     */
    fun start(): Boolean {
        check(nativePtr != 0L) { "Engine not created" }
        return nativeStart(nativePtr)
    }
    /**
     * Submit a video frame as a GPU buffer. Silently a no-op when no engine exists.
     * @param fenceFd GPU fence file descriptor, or -1 for none; ownership passes to native code
     */
    fun submitVideoFrame(hardwareBuffer: HardwareBuffer, timestampNs: Long, fenceFd: Int) {
        if (nativePtr == 0L) return
        nativeSubmitVideoFrame(nativePtr, hardwareBuffer, timestampNs, fenceFd)
    }
    /** Submit raw PCM audio. Silently a no-op when no engine exists. */
    fun submitAudioFrame(pcmData: ByteArray, timestampNs: Long) {
        if (nativePtr == 0L) return
        nativeSubmitAudioFrame(nativePtr, pcmData, timestampNs)
    }
    /** Stop streaming; the engine stays created and must still be destroy()ed. */
    fun stop() {
        if (nativePtr == 0L) return
        nativeStop(nativePtr)
    }
    /** Destroy the native engine and clear the pointer. Safe when already destroyed. */
    fun destroy() {
        if (nativePtr != 0L) {
            nativeDestroy(nativePtr)
            nativePtr = 0
        }
    }
    /** True when a native engine exists and reports itself running. */
    fun isRunning(): Boolean {
        if (nativePtr == 0L) return false
        return nativeIsRunning(nativePtr)
    }
    // Called from native code (JNI callbacks) — names and signatures are looked up
    // by the native layer; do not rename. @Suppress("unused") because Kotlin sees
    // no in-process caller.
    @Suppress("unused")
    private fun onNativeStats(videoBitrate: Long, audioBitrate: Long, fps: Int, droppedFrames: Int) {
        onStats?.invoke(StreamingStats(videoBitrate, audioBitrate, fps, droppedFrames))
    }
    @Suppress("unused")
    private fun onNativeError(code: Int, message: String) {
        Log.e(TAG, "Native error $code: $message")
        onError?.invoke(code, message)
    }
    @Suppress("unused")
    private fun onNativeBufferReleased(bufferIndex: Int) {
        onBufferReleased?.invoke(bufferIndex)
    }
    // Native methods — implemented in liblck_streaming.so.
    private external fun nativeCreate(
        width: Int, height: Int,
        videoBitrate: Int, audioBitrate: Int,
        sampleRate: Int, channels: Int,
        keyframeInterval: Int,
    ): Long
    private external fun nativeAddDestination(ptr: Long, rtmpUrl: String): Int
    private external fun nativeStart(ptr: Long): Boolean
    private external fun nativeSubmitVideoFrame(ptr: Long, hardwareBuffer: HardwareBuffer, timestampNs: Long, fenceFd: Int)
    private external fun nativeSubmitAudioFrame(ptr: Long, pcmData: ByteArray, timestampNs: Long)
    private external fun nativeStop(ptr: Long)
    private external fun nativeDestroy(ptr: Long)
    private external fun nativeIsRunning(ptr: Long): Boolean
}

View File

@@ -0,0 +1,156 @@
package com.omixlab.lckcontrol.streaming
import android.hardware.HardwareBuffer
import android.util.Log
import com.omixlab.lckcontrol.shared.StreamPlan
import com.omixlab.lckcontrol.shared.StreamingConfig
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.StateFlow
import kotlinx.coroutines.flow.asStateFlow
import javax.inject.Inject
import javax.inject.Singleton
/**
 * Lifecycle states of the app-streaming pipeline.
 * Transitions are driven by StreamingManager: IDLE -> STARTING -> LIVE -> STOPPING -> IDLE,
 * with ERROR reachable from start/runtime failures.
 */
enum class StreamingState {
    IDLE, STARTING, LIVE, STOPPING, ERROR
}
/**
 * High-level streaming lifecycle manager.
 * Bridges stream plan configuration to the native streaming engine.
 * Stream keys and RTMP URLs stay within the app process — never exposed via AIDL.
 */
@Singleton
class StreamingManager @Inject constructor() {

    private var engine: NativeStreamingEngine? = null
    private var texturePoolBuffers: Array<HardwareBuffer>? = null

    private val _state = MutableStateFlow(StreamingState.IDLE)
    /** Current lifecycle state of the pipeline. */
    val state: StateFlow<StreamingState> = _state.asStateFlow()

    private val _stats = MutableStateFlow(StreamingStats())
    /** Latest statistics reported by the native engine; zeroed when stopped. */
    val stats: StateFlow<StreamingStats> = _stats.asStateFlow()

    private val _error = MutableStateFlow<String?>(null)
    /** Message for the most recent error, or null when none. */
    val error: StateFlow<String?> = _error.asStateFlow()

    /**
     * Start streaming for a plan with APP_STREAMING execution mode.
     * RTMP URLs are constructed internally from the plan's destinations.
     *
     * Allowed from IDLE or ERROR — previously `!= IDLE` meant a single failure
     * permanently blocked every retry until process restart. Destinations
     * missing an RTMP URL or stream key are skipped; if none remain the
     * manager transitions to ERROR.
     */
    fun startStreaming(plan: StreamPlan, config: StreamingConfig, width: Int, height: Int) {
        val current = _state.value
        if (current != StreamingState.IDLE && current != StreamingState.ERROR) {
            Log.w(TAG, "Cannot start streaming, current state: $current")
            return
        }
        val destinations = plan.destinations.filter {
            it.rtmpUrl.isNotBlank() && it.streamKey.isNotBlank()
        }
        if (destinations.isEmpty()) {
            _error.value = "No destinations with RTMP credentials"
            _state.value = StreamingState.ERROR
            return
        }
        _state.value = StreamingState.STARTING
        _error.value = null

        var eng: NativeStreamingEngine? = null
        try {
            eng = NativeStreamingEngine().apply {
                create(
                    width = width,
                    height = height,
                    videoBitrate = config.videoBitrate,
                    audioBitrate = config.audioBitrate,
                    sampleRate = config.audioSampleRate,
                    channels = config.audioChannels,
                    keyframeInterval = config.keyFrameInterval,
                )
                // Wire callbacks immediately after create so errors raised while
                // adding destinations or starting are not lost.
                onStats = { _stats.value = it }
                onError = { code, message ->
                    Log.e(TAG, "Streaming error $code: $message")
                    _error.value = message
                    _state.value = StreamingState.ERROR
                }
            }
            // Add RTMP destinations — stream keys stay in-process
            for (dest in destinations) {
                eng.addDestination("${dest.rtmpUrl}/${dest.streamKey}")
                Log.d(TAG, "Added destination: ${dest.service}")
            }
            if (eng.start()) {
                engine = eng
                _state.value = StreamingState.LIVE
                Log.i(TAG, "Streaming started with ${destinations.size} destinations")
            } else {
                eng.destroy()
                _error.value = "Failed to start streaming engine"
                _state.value = StreamingState.ERROR
            }
        } catch (e: Exception) {
            Log.e(TAG, "Failed to start streaming", e)
            // Release the partially-initialized engine — the original leaked it here.
            eng?.destroy()
            _error.value = e.message ?: "Unknown error"
            _state.value = StreamingState.ERROR
        }
    }

    /**
     * Register texture pool buffers from the game.
     * Buffers are stored for reference — the native engine receives individual
     * buffers via submitVideoFrame.
     */
    fun registerTexturePool(buffers: Array<HardwareBuffer>, width: Int, height: Int, format: Int) {
        texturePoolBuffers = buffers
        Log.d(TAG, "Texture pool registered: ${buffers.size} buffers, ${width}x${height}")
    }

    fun unregisterTexturePool() {
        texturePoolBuffers = null
        Log.d(TAG, "Texture pool unregistered")
    }

    /**
     * Forward a video frame from the game to the native engine.
     * Frames with no registered pool or an out-of-range index are dropped.
     */
    fun submitVideoFrame(bufferIndex: Int, timestampNs: Long, fenceFd: Int) {
        val buffer = texturePoolBuffers?.getOrNull(bufferIndex) ?: return
        engine?.submitVideoFrame(buffer, timestampNs, fenceFd)
    }

    /** Forward audio PCM from the game to the native engine. */
    fun submitAudioFrame(pcmData: ByteArray, timestampNs: Long) {
        engine?.submitAudioFrame(pcmData, timestampNs)
    }

    /**
     * Stop streaming and release all resources.
     * No-op only when already IDLE or a stop is in progress; any other state
     * is torn down to IDLE. (Previously a stop issued while not LIVE/ERROR
     * was silently ignored, which could strand the manager mid-start.)
     */
    fun stopStreaming() {
        val current = _state.value
        if (current == StreamingState.IDLE || current == StreamingState.STOPPING) {
            return
        }
        _state.value = StreamingState.STOPPING
        engine?.apply {
            stop()
            destroy()
        }
        engine = null
        _state.value = StreamingState.IDLE
        _stats.value = StreamingStats()
        Log.i(TAG, "Streaming stopped")
    }

    /** True only while the pipeline is LIVE. */
    fun isStreaming(): Boolean = _state.value == StreamingState.LIVE

    companion object {
        private const val TAG = "StreamingManager"
    }
}

View File

@@ -0,0 +1,8 @@
package com.omixlab.lckcontrol.streaming
/**
 * Immutable snapshot of streaming statistics reported by the native engine
 * (delivered through NativeStreamingEngine.onNativeStats). All values default
 * to zero, which doubles as the "not streaming" state.
 *
 * @property videoBitrate current video bitrate (units set by the native layer — presumably bits/sec; confirm)
 * @property audioBitrate current audio bitrate (same caveat on units)
 * @property fps encoder output frame rate
 * @property droppedFrames dropped-frame count as reported by the native engine
 */
data class StreamingStats(
    val videoBitrate: Long = 0,
    val audioBitrate: Long = 0,
    val fps: Int = 0,
    val droppedFrames: Int = 0,
)

View File

@@ -23,6 +23,7 @@ import androidx.compose.material3.OutlinedButton
import androidx.compose.material3.Scaffold import androidx.compose.material3.Scaffold
import androidx.compose.material3.SnackbarHost import androidx.compose.material3.SnackbarHost
import androidx.compose.material3.SnackbarHostState import androidx.compose.material3.SnackbarHostState
import androidx.compose.material3.Switch
import androidx.compose.material3.Text import androidx.compose.material3.Text
import androidx.compose.material3.TopAppBar import androidx.compose.material3.TopAppBar
import androidx.compose.runtime.Composable import androidx.compose.runtime.Composable
@@ -80,6 +81,10 @@ fun AccountsScreen(
Text(account.displayName, style = MaterialTheme.typography.titleSmall) Text(account.displayName, style = MaterialTheme.typography.titleSmall)
Text(account.serviceId, style = MaterialTheme.typography.bodySmall) Text(account.serviceId, style = MaterialTheme.typography.bodySmall)
} }
Switch(
checked = account.isEnabled,
onCheckedChange = { viewModel.toggleAccountEnabled(account.id, it) },
)
IconButton(onClick = { viewModel.unlinkAccount(account.id) }) { IconButton(onClick = { viewModel.unlinkAccount(account.id) }) {
Icon(Icons.Default.LinkOff, contentDescription = "Unlink") Icon(Icons.Default.LinkOff, contentDescription = "Unlink")
} }

View File

@@ -65,6 +65,16 @@ class AccountsViewModel @Inject constructor(
} }
} }
fun toggleAccountEnabled(accountId: String, enabled: Boolean) {
viewModelScope.launch {
try {
accountRepository.setAccountEnabled(accountId, enabled)
} catch (e: Exception) {
_linkError.value = e.message ?: "Failed to update account"
}
}
}
fun unlinkAccount(accountId: String) { fun unlinkAccount(accountId: String) {
viewModelScope.launch { viewModelScope.launch {
try { try {

View File

@@ -9,6 +9,7 @@ import androidx.compose.foundation.layout.fillMaxSize
import androidx.compose.foundation.layout.fillMaxWidth import androidx.compose.foundation.layout.fillMaxWidth
import androidx.compose.foundation.layout.height import androidx.compose.foundation.layout.height
import androidx.compose.foundation.layout.padding import androidx.compose.foundation.layout.padding
import androidx.compose.foundation.layout.size
import androidx.compose.foundation.layout.width import androidx.compose.foundation.layout.width
import androidx.compose.foundation.lazy.LazyColumn import androidx.compose.foundation.lazy.LazyColumn
import androidx.compose.foundation.lazy.items import androidx.compose.foundation.lazy.items
@@ -41,8 +42,8 @@ fun DashboardScreen(
onNavigateToPlan: (String) -> Unit, onNavigateToPlan: (String) -> Unit,
viewModel: DashboardViewModel = hiltViewModel(), viewModel: DashboardViewModel = hiltViewModel(),
) { ) {
val accounts by viewModel.accounts.collectAsStateWithLifecycle()
val plans by viewModel.plans.collectAsStateWithLifecycle() val plans by viewModel.plans.collectAsStateWithLifecycle()
val backendHealthy by viewModel.backendHealthy.collectAsStateWithLifecycle()
Scaffold( Scaffold(
topBar = { topBar = {
@@ -63,35 +64,28 @@ fun DashboardScreen(
) { ) {
item { item {
Spacer(Modifier.height(8.dp)) Spacer(Modifier.height(8.dp))
Text("Linked Accounts", style = MaterialTheme.typography.titleMedium) Text("Server Status", style = MaterialTheme.typography.titleMedium)
Spacer(Modifier.height(4.dp)) Spacer(Modifier.height(4.dp))
} ElevatedCard(modifier = Modifier.fillMaxWidth()) {
Row(
if (accounts.isEmpty()) { modifier = Modifier.padding(16.dp),
item { verticalAlignment = Alignment.CenterVertically,
Card(
modifier = Modifier.fillMaxWidth(),
colors = CardDefaults.cardColors(
containerColor = MaterialTheme.colorScheme.surfaceVariant,
),
) { ) {
Text( val (color, label) = when (backendHealthy) {
"No accounts linked yet. Go to Accounts to get started.", true -> MaterialTheme.colorScheme.primary to "Connected"
modifier = Modifier.padding(16.dp), false -> MaterialTheme.colorScheme.error to "Unreachable"
style = MaterialTheme.typography.bodyMedium, null -> MaterialTheme.colorScheme.outline to "Checking..."
}
Icon(
Icons.Default.Circle,
contentDescription = label,
tint = color,
modifier = Modifier.size(12.dp),
) )
} Spacer(Modifier.width(12.dp))
} Column {
} else { Text("Backend", style = MaterialTheme.typography.titleSmall)
item { Text(label, style = MaterialTheme.typography.bodySmall, color = color)
Row(horizontalArrangement = Arrangement.spacedBy(8.dp)) {
accounts.forEach { account ->
ElevatedCard {
Column(modifier = Modifier.padding(12.dp)) {
Text(account.displayName, style = MaterialTheme.typography.labelLarge)
Text(account.serviceId, style = MaterialTheme.typography.bodySmall)
}
}
} }
} }
} }

View File

@@ -2,32 +2,45 @@ package com.omixlab.lckcontrol.ui.dashboard
import androidx.lifecycle.ViewModel import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope import androidx.lifecycle.viewModelScope
import com.omixlab.lckcontrol.data.repository.AccountRepository import com.omixlab.lckcontrol.data.remote.LckApiService
import com.omixlab.lckcontrol.data.repository.StreamPlanRepository import com.omixlab.lckcontrol.data.repository.StreamPlanRepository
import com.omixlab.lckcontrol.shared.LinkedAccount
import com.omixlab.lckcontrol.shared.StreamPlan import com.omixlab.lckcontrol.shared.StreamPlan
import dagger.hilt.android.lifecycle.HiltViewModel import dagger.hilt.android.lifecycle.HiltViewModel
import kotlinx.coroutines.delay
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.SharingStarted import kotlinx.coroutines.flow.SharingStarted
import kotlinx.coroutines.flow.StateFlow import kotlinx.coroutines.flow.StateFlow
import kotlinx.coroutines.flow.asStateFlow
import kotlinx.coroutines.flow.stateIn import kotlinx.coroutines.flow.stateIn
import kotlinx.coroutines.launch import kotlinx.coroutines.launch
import javax.inject.Inject import javax.inject.Inject
@HiltViewModel @HiltViewModel
class DashboardViewModel @Inject constructor( class DashboardViewModel @Inject constructor(
accountRepository: AccountRepository,
private val streamPlanRepository: StreamPlanRepository, private val streamPlanRepository: StreamPlanRepository,
private val apiService: LckApiService,
) : ViewModel() { ) : ViewModel() {
val accounts: StateFlow<List<LinkedAccount>> = accountRepository.observeAccounts()
.stateIn(viewModelScope, SharingStarted.WhileSubscribed(5_000), emptyList())
val plans: StateFlow<List<StreamPlan>> = streamPlanRepository.observePlans() val plans: StateFlow<List<StreamPlan>> = streamPlanRepository.observePlans()
.stateIn(viewModelScope, SharingStarted.WhileSubscribed(5_000), emptyList()) .stateIn(viewModelScope, SharingStarted.WhileSubscribed(5_000), emptyList())
private val _backendHealthy = MutableStateFlow<Boolean?>(null)
val backendHealthy: StateFlow<Boolean?> = _backendHealthy.asStateFlow()
init { init {
viewModelScope.launch { viewModelScope.launch {
try { streamPlanRepository.syncPlans() } catch (_: Exception) {} try { streamPlanRepository.syncPlans() } catch (_: Exception) {}
} }
viewModelScope.launch {
while (true) {
_backendHealthy.value = try {
apiService.healthCheck()
true
} catch (_: Exception) {
false
}
delay(5_000)
}
}
} }
} }

View File

@@ -1,15 +1,11 @@
package com.omixlab.lckcontrol.ui.navigation package com.omixlab.lckcontrol.ui.navigation
import androidx.compose.foundation.layout.Box
import androidx.compose.foundation.layout.padding import androidx.compose.foundation.layout.padding
import androidx.compose.foundation.layout.size
import androidx.compose.material.icons.Icons import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.Circle
import androidx.compose.material.icons.filled.Dashboard import androidx.compose.material.icons.filled.Dashboard
import androidx.compose.material.icons.filled.Devices import androidx.compose.material.icons.filled.Devices
import androidx.compose.material.icons.filled.Person import androidx.compose.material.icons.filled.Person
import androidx.compose.material3.Icon import androidx.compose.material3.Icon
import androidx.compose.material3.MaterialTheme
import androidx.compose.material3.NavigationBar import androidx.compose.material3.NavigationBar
import androidx.compose.material3.NavigationBarItem import androidx.compose.material3.NavigationBarItem
import androidx.compose.material3.Scaffold import androidx.compose.material3.Scaffold
@@ -17,13 +13,8 @@ import androidx.compose.material3.Text
import androidx.compose.runtime.Composable import androidx.compose.runtime.Composable
import androidx.compose.runtime.LaunchedEffect import androidx.compose.runtime.LaunchedEffect
import androidx.compose.runtime.getValue import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.remember
import androidx.compose.runtime.setValue
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.vector.ImageVector import androidx.compose.ui.graphics.vector.ImageVector
import androidx.compose.ui.unit.dp
import androidx.navigation.NavGraph.Companion.findStartDestination import androidx.navigation.NavGraph.Companion.findStartDestination
import androidx.navigation.NavType import androidx.navigation.NavType
import androidx.navigation.compose.NavHost import androidx.navigation.compose.NavHost
@@ -39,7 +30,6 @@ import com.omixlab.lckcontrol.ui.dashboard.DashboardScreen
import com.omixlab.lckcontrol.ui.login.LoginScreen import com.omixlab.lckcontrol.ui.login.LoginScreen
import com.omixlab.lckcontrol.ui.plans.CreatePlanScreen import com.omixlab.lckcontrol.ui.plans.CreatePlanScreen
import com.omixlab.lckcontrol.ui.plans.PlanDetailScreen import com.omixlab.lckcontrol.ui.plans.PlanDetailScreen
import kotlinx.coroutines.delay
private data class BottomNavItem( private data class BottomNavItem(
val screen: Screen, val screen: Screen,
@@ -62,22 +52,6 @@ fun AppNavigation(tokenStore: TokenStore, apiService: LckApiService) {
val showBottomBar = currentRoute in bottomNavItems.map { it.screen.route } val showBottomBar = currentRoute in bottomNavItems.map { it.screen.route }
val startDestination = if (tokenStore.isLoggedIn()) Screen.Dashboard.route else Screen.Login.route val startDestination = if (tokenStore.isLoggedIn()) Screen.Dashboard.route else Screen.Login.route
// Backend health state
var backendHealthy by remember { mutableStateOf<Boolean?>(null) }
// Poll backend health every 5 seconds
LaunchedEffect(Unit) {
while (true) {
backendHealthy = try {
apiService.healthCheck()
true
} catch (_: Exception) {
false
}
delay(5_000)
}
}
// Session validation on app open — if we think we're logged in, verify it // Session validation on app open — if we think we're logged in, verify it
LaunchedEffect(Unit) { LaunchedEffect(Unit) {
if (tokenStore.isLoggedIn()) { if (tokenStore.isLoggedIn()) {
@@ -101,24 +75,7 @@ fun AppNavigation(tokenStore: TokenStore, apiService: LckApiService) {
bottomNavItems.forEach { item -> bottomNavItems.forEach { item ->
NavigationBarItem( NavigationBarItem(
icon = { icon = {
if (item.screen == Screen.Dashboard && backendHealthy != null) { Icon(item.icon, contentDescription = item.label)
Box {
Icon(item.icon, contentDescription = item.label)
Icon(
Icons.Default.Circle,
contentDescription = if (backendHealthy == true) "Backend healthy" else "Backend unreachable",
tint = if (backendHealthy == true)
MaterialTheme.colorScheme.primary
else
MaterialTheme.colorScheme.error,
modifier = Modifier
.size(8.dp)
.align(Alignment.TopEnd),
)
}
} else {
Icon(item.icon, contentDescription = item.label)
}
}, },
label = { Text(item.label) }, label = { Text(item.label) },
selected = currentRoute == item.screen.route, selected = currentRoute == item.screen.route,

View File

@@ -20,6 +20,7 @@ import androidx.compose.material3.ElevatedCard
import androidx.compose.material3.ExperimentalMaterial3Api import androidx.compose.material3.ExperimentalMaterial3Api
import androidx.compose.material3.ExposedDropdownMenuBox import androidx.compose.material3.ExposedDropdownMenuBox
import androidx.compose.material3.ExposedDropdownMenuDefaults import androidx.compose.material3.ExposedDropdownMenuDefaults
import androidx.compose.material3.FilterChip
import androidx.compose.material3.Icon import androidx.compose.material3.Icon
import androidx.compose.material3.IconButton import androidx.compose.material3.IconButton
import androidx.compose.material3.MaterialTheme import androidx.compose.material3.MaterialTheme
@@ -51,6 +52,8 @@ fun CreatePlanScreen(
viewModel: CreatePlanViewModel = hiltViewModel(), viewModel: CreatePlanViewModel = hiltViewModel(),
) { ) {
val planName by viewModel.planName.collectAsStateWithLifecycle() val planName by viewModel.planName.collectAsStateWithLifecycle()
val executionMode by viewModel.executionMode.collectAsStateWithLifecycle()
val gameId by viewModel.gameId.collectAsStateWithLifecycle()
val destinations by viewModel.destinations.collectAsStateWithLifecycle() val destinations by viewModel.destinations.collectAsStateWithLifecycle()
val linkedAccounts by viewModel.linkedAccounts.collectAsStateWithLifecycle() val linkedAccounts by viewModel.linkedAccounts.collectAsStateWithLifecycle()
val isCreating by viewModel.isCreating.collectAsStateWithLifecycle() val isCreating by viewModel.isCreating.collectAsStateWithLifecycle()
@@ -95,6 +98,46 @@ fun CreatePlanScreen(
) )
} }
item {
Spacer(Modifier.height(8.dp))
Text("Execution Mode", style = MaterialTheme.typography.titleMedium)
Spacer(Modifier.height(4.dp))
Row(
modifier = Modifier.fillMaxWidth(),
horizontalArrangement = Arrangement.spacedBy(8.dp),
) {
FilterChip(
selected = executionMode == "IN_GAME",
onClick = { viewModel.setExecutionMode("IN_GAME") },
label = { Text("In-Game") },
)
FilterChip(
selected = executionMode == "APP_STREAMING",
onClick = { viewModel.setExecutionMode("APP_STREAMING") },
label = { Text("App Streaming") },
)
}
if (executionMode == "APP_STREAMING") {
Spacer(Modifier.height(4.dp))
Text(
"The app encodes and streams. Stream keys stay secure.",
style = MaterialTheme.typography.bodySmall,
color = MaterialTheme.colorScheme.onSurfaceVariant,
)
}
}
item {
OutlinedTextField(
value = gameId,
onValueChange = viewModel::setGameId,
label = { Text("Game Package ID") },
placeholder = { Text("com.example.game") },
modifier = Modifier.fillMaxWidth(),
singleLine = true,
)
}
item { item {
Spacer(Modifier.height(8.dp)) Spacer(Modifier.height(8.dp))
Row( Row(

View File

@@ -37,6 +37,12 @@ class CreatePlanViewModel @Inject constructor(
private val _planName = MutableStateFlow("") private val _planName = MutableStateFlow("")
val planName: StateFlow<String> = _planName.asStateFlow() val planName: StateFlow<String> = _planName.asStateFlow()
private val _executionMode = MutableStateFlow("IN_GAME")
val executionMode: StateFlow<String> = _executionMode.asStateFlow()
private val _gameId = MutableStateFlow("")
val gameId: StateFlow<String> = _gameId.asStateFlow()
private val _destinations = MutableStateFlow<List<DestinationInput>>(emptyList()) private val _destinations = MutableStateFlow<List<DestinationInput>>(emptyList())
val destinations: StateFlow<List<DestinationInput>> = _destinations.asStateFlow() val destinations: StateFlow<List<DestinationInput>> = _destinations.asStateFlow()
@@ -50,6 +56,14 @@ class CreatePlanViewModel @Inject constructor(
_planName.value = name _planName.value = name
} }
fun setExecutionMode(mode: String) {
_executionMode.value = mode
}
fun setGameId(gameId: String) {
_gameId.value = gameId
}
fun addDestination() { fun addDestination() {
_destinations.value = _destinations.value + DestinationInput() _destinations.value = _destinations.value + DestinationInput()
} }
@@ -100,7 +114,7 @@ class CreatePlanViewModel @Inject constructor(
tags = input.tags.split(",").map { it.trim() }.filter { it.isNotBlank() }, tags = input.tags.split(",").map { it.trim() }.filter { it.isNotBlank() },
) )
} }
val plan = streamPlanRepository.createPlan(name, streamDests) val plan = streamPlanRepository.createPlan(name, streamDests, _executionMode.value, _gameId.value)
onCreated(plan.planId) onCreated(plan.planId)
} catch (e: Exception) { } catch (e: Exception) {
_error.value = e.message ?: "Failed to create plan" _error.value = e.message ?: "Failed to create plan"

View File

@@ -40,6 +40,7 @@ import androidx.compose.ui.unit.dp
import androidx.hilt.navigation.compose.hiltViewModel import androidx.hilt.navigation.compose.hiltViewModel
import androidx.lifecycle.compose.collectAsStateWithLifecycle import androidx.lifecycle.compose.collectAsStateWithLifecycle
import com.omixlab.lckcontrol.shared.StreamDestination import com.omixlab.lckcontrol.shared.StreamDestination
import com.omixlab.lckcontrol.streaming.StreamingState
@OptIn(ExperimentalMaterial3Api::class) @OptIn(ExperimentalMaterial3Api::class)
@Composable @Composable
@@ -51,6 +52,8 @@ fun PlanDetailScreen(
val plan by viewModel.plan.collectAsStateWithLifecycle() val plan by viewModel.plan.collectAsStateWithLifecycle()
val isLoading by viewModel.isLoading.collectAsStateWithLifecycle() val isLoading by viewModel.isLoading.collectAsStateWithLifecycle()
val error by viewModel.error.collectAsStateWithLifecycle() val error by viewModel.error.collectAsStateWithLifecycle()
val streamingState by viewModel.streamingState.collectAsStateWithLifecycle()
val streamingStats by viewModel.streamingStats.collectAsStateWithLifecycle()
val snackbarHostState = remember { SnackbarHostState() } val snackbarHostState = remember { SnackbarHostState() }
LaunchedEffect(error) { LaunchedEffect(error) {
@@ -122,6 +125,45 @@ fun PlanDetailScreen(
} }
} }
// Execution mode
item {
ElevatedCard(modifier = Modifier.fillMaxWidth()) {
Column(modifier = Modifier.padding(16.dp)) {
Text("Execution Mode", style = MaterialTheme.typography.labelMedium)
Spacer(Modifier.height(4.dp))
Text(
when (currentPlan.executionMode) {
"APP_STREAMING" -> "App Streaming"
else -> "In-Game"
},
style = MaterialTheme.typography.bodyMedium,
)
}
}
}
// Game ID
if (currentPlan.gameId.isNotBlank()) {
item {
ElevatedCard(modifier = Modifier.fillMaxWidth()) {
Column(modifier = Modifier.padding(16.dp)) {
Text("Game", style = MaterialTheme.typography.labelMedium)
Spacer(Modifier.height(4.dp))
Text(currentPlan.gameId, style = MaterialTheme.typography.bodyMedium)
}
}
}
}
// Streaming stats (only for APP_STREAMING + LIVE)
if (currentPlan.executionMode == "APP_STREAMING" &&
currentPlan.status == "LIVE" &&
streamingState == StreamingState.LIVE) {
item {
StreamingStatsCard(stats = streamingStats)
}
}
// Action buttons // Action buttons
item { item {
when (currentPlan.status) { when (currentPlan.status) {

View File

@@ -5,6 +5,9 @@ import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope import androidx.lifecycle.viewModelScope
import com.omixlab.lckcontrol.data.repository.StreamPlanRepository import com.omixlab.lckcontrol.data.repository.StreamPlanRepository
import com.omixlab.lckcontrol.shared.StreamPlan import com.omixlab.lckcontrol.shared.StreamPlan
import com.omixlab.lckcontrol.streaming.StreamingManager
import com.omixlab.lckcontrol.streaming.StreamingState
import com.omixlab.lckcontrol.streaming.StreamingStats
import dagger.hilt.android.lifecycle.HiltViewModel import dagger.hilt.android.lifecycle.HiltViewModel
import kotlinx.coroutines.flow.MutableStateFlow import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.SharingStarted import kotlinx.coroutines.flow.SharingStarted
@@ -18,6 +21,7 @@ import javax.inject.Inject
class PlanDetailViewModel @Inject constructor( class PlanDetailViewModel @Inject constructor(
savedStateHandle: SavedStateHandle, savedStateHandle: SavedStateHandle,
private val streamPlanRepository: StreamPlanRepository, private val streamPlanRepository: StreamPlanRepository,
private val streamingManager: StreamingManager,
) : ViewModel() { ) : ViewModel() {
private val planId: String = savedStateHandle["planId"] ?: "" private val planId: String = savedStateHandle["planId"] ?: ""
@@ -32,6 +36,9 @@ class PlanDetailViewModel @Inject constructor(
} }
} }
val streamingState: StateFlow<StreamingState> = streamingManager.state
val streamingStats: StateFlow<StreamingStats> = streamingManager.stats
private val _isLoading = MutableStateFlow(false) private val _isLoading = MutableStateFlow(false)
val isLoading: StateFlow<Boolean> = _isLoading.asStateFlow() val isLoading: StateFlow<Boolean> = _isLoading.asStateFlow()

View File

@@ -0,0 +1,52 @@
package com.omixlab.lckcontrol.ui.plans
import androidx.compose.foundation.layout.Arrangement
import androidx.compose.foundation.layout.Column
import androidx.compose.foundation.layout.Row
import androidx.compose.foundation.layout.fillMaxWidth
import androidx.compose.foundation.layout.padding
import androidx.compose.material3.ElevatedCard
import androidx.compose.material3.MaterialTheme
import androidx.compose.material3.Text
import androidx.compose.runtime.Composable
import androidx.compose.ui.Modifier
import androidx.compose.ui.unit.dp
import com.omixlab.lckcontrol.streaming.StreamingStats
/**
 * Card displaying live encoder metrics for a stream plan.
 *
 * Shown on the plan detail screen while an APP_STREAMING plan is LIVE
 * (see the caller's gating on executionMode/status). Renders a single
 * row of four stats: video bitrate, audio bitrate, FPS, and dropped
 * frame count, sourced from [StreamingStats].
 *
 * @param stats current snapshot of streaming metrics; re-composes as the
 *   collected StateFlow emits new values.
 */
@Composable
fun StreamingStatsCard(stats: StreamingStats) {
    ElevatedCard(modifier = Modifier.fillMaxWidth()) {
        Column(
            modifier = Modifier.padding(16.dp),
            verticalArrangement = Arrangement.spacedBy(8.dp),
        ) {
            Text("Streaming Stats", style = MaterialTheme.typography.titleSmall)
            // Spread the four metrics evenly across the card width.
            Row(
                modifier = Modifier.fillMaxWidth(),
                horizontalArrangement = Arrangement.SpaceBetween,
            ) {
                StatItem("Video", formatBitrate(stats.videoBitrate))
                StatItem("Audio", formatBitrate(stats.audioBitrate))
                StatItem("FPS", "${stats.fps}")
                StatItem("Dropped", "${stats.droppedFrames}")
            }
        }
    }
}
/**
 * A single labeled metric: small caption on top, value underneath.
 *
 * @param label short metric name (e.g. "Video", "FPS").
 * @param value pre-formatted display string for the metric.
 */
@Composable
private fun StatItem(label: String, value: String) {
    Column {
        Text(label, style = MaterialTheme.typography.labelSmall)
        Text(value, style = MaterialTheme.typography.bodyMedium)
    }
}
/**
 * Formats a bitrate in bits-per-second into a human-readable string.
 *
 * Uses decimal (SI) thresholds: >= 1 Mbps shows one decimal place
 * ("2.5 Mbps"), >= 1 kbps shows whole numbers ("128 kbps"), otherwise
 * raw bps ("500 bps").
 *
 * Fix: format with [Locale.US] explicitly. The previous implicit
 * default-locale formatting produced locale-dependent decimal
 * separators (e.g. "2,5 Mbps" on a German-locale device), making the
 * UI string inconsistent across devices.
 *
 * @param bps bitrate in bits per second; non-negative expected.
 * @return formatted, locale-stable display string.
 */
private fun formatBitrate(bps: Long): String = when {
    bps >= 1_000_000 -> "%.1f Mbps".format(Locale.US, bps / 1_000_000.0)
    bps >= 1_000 -> "%.0f kbps".format(Locale.US, bps / 1_000.0)
    else -> "$bps bps"
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,745 @@
# LIV Control Center (Hub) vs LCK Control (Companion App)
## Comprehensive Architecture Comparison & Unification Strategy
---
## 1. Executive Summary
LIV has two parallel applications for managing game streaming on Quest:
| | **Hub** (`liv-control-center`) | **Control** (`lck-control`) |
|---|---|---|
| **Stage** | Production (Quest Store, low reviews) | Prototype (new architecture) |
| **Stack** | Rust + Tauri + Leptos (WASM) | Kotlin + Jetpack Compose + Hilt |
| **Streaming** | App captures screen & encodes | Game encodes directly from render pipeline |
| **Communication** | Async via backend server | Synchronous IPC via AIDL |
| **Destinations** | Single target | Multi-destination |
| **UE5 Plugin** | `LCKStreaming` (HTTP/JSON-RPC) | `LCKControl` (AIDL/JNI) |
---
## 2. High-Level Architecture
### 2.1 Hub Architecture
```mermaid
graph TB
subgraph Quest Headset
subgraph "Hub App (Tauri + Leptos WASM)"
UI_H["Leptos UI<br/>(WASM)"]
Core_H["Rust Core<br/>(Tauri Backend)"]
Encoder_H["MediaCodec<br/>H.264 + AAC"]
RTMP_H["minirtmp<br/>(RTMP Client)"]
Capture["ScreenCaptureService<br/>(MediaProjection)"]
end
subgraph "UE5 Game"
Plugin_S["LCKStreaming Plugin"]
API_Client["HTTP/JSON-RPC Client"]
end
end
subgraph "Cloud Server"
Backend_H["Hub Backend<br/>(api.obi.gg)"]
end
subgraph "Streaming Platforms"
YT["YouTube Live"]
TW["Twitch"]
end
UI_H <-->|Tauri IPC| Core_H
Core_H -->|JSON-RPC 2.0<br/>HTTPS + Cert Pinning| Backend_H
Plugin_S -->|JSON-RPC 2.0<br/>HTTPS| Backend_H
Backend_H -->|"Device Pairing<br/>(async polling)"| Plugin_S
Core_H --> Capture
Capture --> Encoder_H
Encoder_H --> RTMP_H
RTMP_H -->|RTMP| YT
RTMP_H -->|RTMP| TW
style Backend_H fill:#f96,stroke:#333
style Capture fill:#ff9,stroke:#333
style Encoder_H fill:#ff9,stroke:#333
```
**Key: The Hub app captures the screen, encodes it, and streams. The game and hub communicate indirectly through the backend server.**
### 2.2 Control App Architecture
```mermaid
graph TB
subgraph Quest Headset
subgraph "Control App (Kotlin + Compose)"
UI_C["Compose UI"]
VM["ViewModels + Repos"]
Service["LckControlService<br/>(Foreground + AIDL)"]
DB["Room DB<br/>(Local Cache)"]
end
subgraph "UE5 Game"
Plugin_C["LCKControl Plugin"]
JNI["JNI Bridge"]
SDK["lck-control-sdk<br/>(AAR)"]
Encoder_C["LCK Encoder<br/>(H.264 + AAC)"]
RTMP_C1["RTMP Sink 1"]
RTMP_C2["RTMP Sink 2"]
RTMP_CN["RTMP Sink N"]
end
end
subgraph "Self-Hosted Server"
Backend_C["Control Backend<br/>(Node.js + Fastify)"]
SQLite["SQLite DB"]
end
subgraph "Streaming Platforms"
YT2["YouTube Live"]
TW2["Twitch"]
Manual["Custom RTMP"]
end
UI_C <--> VM
VM <-->|REST API<br/>JWT Auth| Backend_C
VM <--> DB
VM <--> Service
Plugin_C --> JNI
JNI --> SDK
SDK <-->|"AIDL IPC<br/>(Bound Service)"| Service
Backend_C <--> SQLite
Backend_C -->|"OAuth + RTMP<br/>Resolution"| YT2
Backend_C -->|"OAuth + RTMP<br/>Resolution"| TW2
Encoder_C --> RTMP_C1
Encoder_C --> RTMP_C2
Encoder_C --> RTMP_CN
RTMP_C1 -->|RTMP| YT2
RTMP_C2 -->|RTMP| TW2
RTMP_CN -->|RTMP| Manual
style Service fill:#9f9,stroke:#333
style SDK fill:#9f9,stroke:#333
style Encoder_C fill:#9cf,stroke:#333
```
**Key: The game encodes directly from its render pipeline and streams to multiple destinations. The companion app provides stream configuration via direct IPC.**
---
## 3. Communication Model Comparison
### 3.1 Hub: Server-Mediated Async Communication
```mermaid
sequenceDiagram
participant Game as UE5 Game<br/>(LCKStreaming)
participant Server as Hub Backend<br/>(api.obi.gg)
participant Hub as Hub App<br/>(Tauri)
participant Platform as YouTube/Twitch
Note over Game,Hub: Device Pairing (one-time)
Game->>Server: create_device_login_attempt()
Server-->>Game: 6-digit pairing code
Game->>Game: Display code to user
Hub->>Server: pair_device(code)
Server-->>Hub: Device paired
Note over Game,Hub: Stream Setup
Hub->>Server: get_user_profile()
Server-->>Hub: Streaming target + RTMP URL
Hub->>Hub: Start screen capture
Hub->>Hub: Encode H.264 + AAC
Hub->>Platform: RTMP stream
Note over Game,Server: Game has no direct<br/>connection to Hub
Game->>Server: Poll for updates (2.5s)
Server-->>Game: Current state
```
### 3.2 Control: Direct IPC Communication
```mermaid
sequenceDiagram
participant Game as UE5 Game<br/>(LCKControl + JNI)
participant App as Control App<br/>(AIDL Service)
participant Server as Control Backend<br/>(Fastify)
participant Platform as YouTube/Twitch
Note over Game,App: Service Binding (direct)
Game->>App: bindService() via AIDL
App-->>Game: ILckControlService binder
Game->>App: registerAsClient("MyGame", pkg)
App-->>Game: clientId
Note over Game,Platform: Stream Lifecycle
Game->>App: getStreamPlans()
App-->>Game: List<StreamPlan>
Game->>App: prepareStreamPlan(planId)
App->>Server: POST /streams/plans/{id}/prepare
Server->>Platform: Create broadcast + get RTMP URLs
Platform-->>Server: RTMP URLs + stream keys
Server-->>App: PrepareResponse
App-->>Game: StreamPlan (with RTMP data)
Game->>Game: Encode from render pipeline
Game->>Platform: RTMP stream (dest 1)
Game->>Platform: RTMP stream (dest 2)
Game->>App: startStreamPlan(planId)
App->>Server: POST /streams/plans/{id}/start
Server->>Platform: Transition broadcast to LIVE
```
---
## 4. Technology Stack Comparison
### 4.1 Application Layer
| Component | Hub | Control |
|-----------|-----|---------|
| **Language** | Rust (95%) + Kotlin (JNI) | Kotlin (100%) |
| **UI Framework** | Leptos 0.8.2 (Rust WASM) | Jetpack Compose (2024.09 BOM) |
| **App Framework** | Tauri v2.6.2 | Native Android |
| **Styling** | TailwindCSS v4 | Material Design 3 |
| **State Mgmt** | Leptos reactive signals | StateFlow + collectAsStateWithLifecycle |
| **DI** | None (manual wiring) | Hilt 2.59.2 |
| **Navigation** | Leptos Router | Compose Navigation 2.8.4 |
| **Local Storage** | Platform credential store | Room 2.8.4 + EncryptedSharedPreferences |
| **HTTP Client** | reqwest (rustls TLS) | Retrofit 2.11.0 + OkHttp 4.12.0 |
| **JSON** | serde_json | Moshi 1.15.1 |
| **Auth SDK** | Meta Horizon Platform SDK 77.0.1 | Meta Horizon Platform SDK 77.0.1 |
| **Crash Reporting** | Sentry (Android SDK bridge) | None |
### 4.2 Backend Layer
| Component | Hub Backend | Control Backend |
|-----------|-------------|-----------------|
| **Hosting** | Cloud (`api.obi.gg`) | Self-hosted (Docker on NAS, port 3100) |
| **Protocol** | JSON-RPC 2.0 | REST (JSON) |
| **Stack** | Unknown (external) | Node.js 20 + Fastify 5 + TypeScript 5.7 |
| **Database** | Unknown | SQLite (Prisma 6.4 ORM) |
| **Auth** | JWT (via JSON-RPC response headers) | JWT HS256 (jose 6.0) |
| **Token Security** | Unknown | AES-256-GCM encryption + SHA256 hashing |
| **OAuth** | Server handles YouTube/Twitch | Server handles YouTube/Twitch |
| **Rate Limiting** | Unknown | 100 req/min (Fastify plugin) |
| **Deployment** | Managed cloud | Docker + docker-compose |
### 4.3 UE5 Plugin Layer
| Component | LCKStreaming (Hub) | LCKControl (Companion) |
|-----------|-------------------|----------------------|
| **Communication** | HTTP/JSON-RPC 2.0 | AIDL via JNI |
| **Transport** | HTTPS (cross-network) | Local IPC (same device) |
| **Auth Flow** | Device code (6-digit) + polling | Direct service binding |
| **Token Storage** | EncryptedSharedPreferences | None (companion owns tokens) |
| **RTMP Sinks** | 1 (single destination) | N (multi-destination) |
| **Blocking Model** | Async HTTP callbacks | Synchronous JNI calls |
| **Platform Support** | Cross-platform capable | Android only |
| **Latency** | Network round-trip (100ms+) | IPC (~1ms) |
| **Offline Capable** | No (requires server) | Partial (companion has local cache) |
---
## 5. Streaming Architecture Deep Dive
### 5.1 Hub: Screen Capture + Re-Encoding
```mermaid
graph LR
subgraph "UE5 Game Process"
Render["Game Renderer<br/>(GPU)"]
end
subgraph "Android OS"
FB["Framebuffer /<br/>Display Compositor"]
MP["MediaProjection<br/>(Screen Capture API)"]
end
subgraph "Hub App Process"
VD["VirtualDisplay"]
MC_V["MediaCodec<br/>(H.264 Encoder)"]
MC_A["MediaCodec<br/>(AAC Encoder)"]
AR["AudioRecord<br/>(System Audio)"]
MR["minirtmp<br/>(RTMP Client)"]
end
subgraph "CDN"
RTMP["YouTube / Twitch<br/>RTMP Ingest"]
end
Render --> FB
FB --> MP
MP --> VD
VD --> MC_V
AR --> MC_A
MC_V --> MR
MC_A --> MR
MR --> RTMP
style FB fill:#fbb,stroke:#333
style MP fill:#fbb,stroke:#333
```
**Problems:**
- Extra GPU copy through display compositor
- Re-encoding already rendered frames (quality loss)
- Higher latency (capture → encode → send)
- Higher battery/thermal impact (two encoding passes)
- Captures UI overlays, notifications, system bars
- Resolution limited to display resolution
### 5.2 Control: Direct Render Pipeline Encoding
```mermaid
graph LR
subgraph "UE5 Game Process"
Render["Game Renderer<br/>(GPU)"]
SCC["SceneCaptureComponent2D<br/>(Render Target)"]
ENC["LCK Encoder<br/>(H.264 + AAC)"]
S1["RTMP Sink 1<br/>(YouTube)"]
S2["RTMP Sink 2<br/>(Twitch)"]
S3["RTMP Sink 3<br/>(Custom)"]
end
subgraph "CDN"
YT["YouTube RTMP"]
TW["Twitch RTMP"]
CU["Custom RTMP"]
end
Render --> SCC
SCC --> ENC
ENC --> S1
ENC --> S2
ENC --> S3
S1 --> YT
S2 --> TW
S3 --> CU
style SCC fill:#bfb,stroke:#333
style ENC fill:#bfb,stroke:#333
```
**Advantages:**
- Direct GPU texture access (no compositor overhead)
- Single encode pass (game scene only, no UI clutter)
- Lower latency
- Lower battery/thermal impact
- Configurable resolution independent of display
- Multi-destination from single encode
- Clean game footage (no system overlays)
---
## 6. Feature Comparison Matrix
| Feature | Hub | Control | Notes |
|---------|:---:|:-------:|-------|
| **Meta/Quest Login** | Yes | Yes | Both use Horizon Platform SDK 77.0.1 |
| **YouTube OAuth** | Yes | Yes | Both server-side token exchange |
| **Twitch OAuth** | Yes | Yes | Both server-side token exchange |
| **Multi-Destination Streaming** | No (1) | Yes (N) | Major difference |
| **Stream Plans** | No | Yes | Control has full lifecycle management |
| **Direct Game Encoding** | No | Yes | Control encodes from render pipeline |
| **Screen Capture Streaming** | Yes | No | Hub captures and re-encodes |
| **Custom RTMP Targets** | Yes | Yes | Both support manual RTMP |
| **Game Client Management** | Yes (pairing) | Yes (AIDL) | Different mechanisms |
| **IGDB Game Database** | Yes | No | Hub has game cover art |
| **Watermark** | Yes | No | Hub has overlay support |
| **Subscription Model** | Yes | No | Hub has paid tier |
| **Sentry Crash Reporting** | Yes | No | Hub has telemetry |
| **Certificate Pinning** | Yes | No | Hub has SPKI pinning |
| **Offline Caching** | No | Yes | Control has Room DB |
| **Background Token Refresh** | Unknown | Yes | Control backend has scheduler |
| **CI/CD Pipeline** | Yes (Jenkins) | Partial (deploy.ps1) | Hub has full CI |
| **Desktop Support** | Yes | No | Tauri supports desktop |
| **Cross-Platform** | Yes (Desktop + Android) | No (Android only) | Hub has wider reach |
---
## 7. Pros and Cons
### 7.1 Hub (liv-control-center)
#### Pros
- **Cross-platform**: Tauri supports Desktop + Android, one codebase
- **Self-contained streaming**: No dependency on game integration
- **Works with any game**: Screen capture works regardless of game engine support
- **Production infrastructure**: Jenkins CI/CD, Sentry, cloud backend
- **Rich features**: IGDB, watermarks, subscription model
- **Rust performance**: Memory-safe, low-level control over encoding
#### Cons
- **Screen capture quality**: Re-encoding degrades quality, captures overlays
- **Higher resource usage**: Extra GPU copy + encode pass drains battery faster
- **Single destination**: Can only stream to one platform at a time
- **Complex stack**: Rust + WASM + Tauri + Kotlin JNI is hard to maintain
- **Server dependency**: All communication goes through cloud backend
- **Latency**: Network round-trips for game communication (polling every 2.5s)
- **Low store reviews**: Users experiencing issues (reason for this analysis)
- **Niche UI framework**: Leptos (WASM) has small ecosystem vs Compose
- **No stream plans**: Simple streaming model without plan lifecycle
### 7.2 Control App (lck-control)
#### Pros
- **Direct render pipeline**: Game encodes from GPU, best possible quality
- **Multi-destination**: Stream to YouTube + Twitch + custom simultaneously
- **Low latency IPC**: AIDL communication in ~1ms vs 100ms+ network calls
- **Stream plans**: Full lifecycle (DRAFT → READY → LIVE → ENDED)
- **Clean architecture**: Standard Android stack (Compose, Hilt, Room, Retrofit)
- **Own backend**: Full control over API, auth, token management
- **SDK module**: Clean AAR for UE5 consumption via JNI
- **Lower resource usage**: No screen capture or re-encoding overhead
- **Maintainable**: Kotlin + Compose is mainstream Android with large ecosystem
- **Offline caching**: Room DB + encrypted token store
#### Cons
- **Android only**: No desktop support
- **Requires game integration**: Game must use LCKControl plugin (not universal)
- **Prototype stage**: Not production-ready yet
- **Self-hosted backend**: Requires infrastructure management (Docker on NAS)
- **No CI/CD**: Manual builds via PowerShell script
- **No crash reporting**: No Sentry or equivalent
- **No subscription model**: No monetization built in
- **No IGDB integration**: No game metadata/artwork
- **Blocking IPC**: Synchronous JNI calls could cause ANRs if slow
---
## 8. UE5 Plugin Comparison
### 8.1 LCKStreaming Plugin (uses Hub)
```mermaid
stateDiagram-v2
[*] --> Idle
Idle --> LoggingIn: StartLogin()
LoggingIn --> WaitingForCode: create_device_login_attempt
WaitingForCode --> Polling: Display 6-digit code
Polling --> Authenticated: check_device_login_attempt<br/>(every 2.5s)
Polling --> Polling: Not yet paired
Authenticated --> FetchingProfile: get_user_profile
FetchingProfile --> Ready: Got RTMP target
Ready --> Streaming: StartStreaming()
Streaming --> Ready: StopStreaming()
Ready --> Idle: Logout()
note right of Polling
User must manually enter
code in Hub app or website
end note
```
**Architecture:**
- `ULCKStreamingSubsystem` — GameInstance subsystem, owns API client + RTMP sink
- `FLCKStreamingApiClient` — HTTP client, JSON-RPC 2.0, cert pinning
- `FLCKRtmpSink` / `FLCKRtmpClient` — Single RTMP connection via librtmp
- Auth token stored in platform credential store
- Single streaming target resolved by backend
### 8.2 LCKControl Plugin (uses Companion App)
```mermaid
stateDiagram-v2
[*] --> Disconnected
Disconnected --> Connecting: ConnectToCompanionApp()
Connecting --> Connected: AIDL service bound<br/>(poll every 1s)
Connected --> HasPlans: GetStreamPlans()
HasPlans --> Prepared: PrepareStreamPlan(planId)<br/>→ RTMP URLs resolved
Prepared --> Streaming: StartStreamPlan(planId)<br/>+ Attach N RTMP sinks
Streaming --> Prepared: EndStreamPlan(planId)
Connected --> Disconnected: DisconnectFromCompanionApp()
note right of Connected
Direct AIDL binding,
no pairing code needed
end note
note right of Streaming
Multiple RTMP sinks active
simultaneously
end note
```
**Architecture:**
- `ULCKControlSubsystem` — GameInstance subsystem, owns JNI bridge + multiple RTMP sinks
- `LCKControlAndroid.cpp` — ~700 lines of JNI bindings to `LckControlClient` (AAR)
- Multiple `FLCKRtmpSink` instances — one per stream destination
- No token management — companion app handles all auth
- Full stream plan lifecycle control
### 8.3 Shared Infrastructure (LCK Base Plugin)
Both plugins share:
- `ILCKStreamingFeature` — Common interface (StartLogin, StartStreaming, StopStreaming, etc.)
- `ILCKEncoderFactory` — Encoder creation
- `ULCKRecorderSubsystem` — Encoder lifecycle management
- `FLCKRtmpSink` / `FLCKRtmpClient` — RTMP transport layer
- H.264 + AAC encoding via platform-specific backends (NVCodec, MediaCodec)
---
## 9. Backend Comparison
### 9.1 Hub Backend (`api.obi.gg`)
```mermaid
graph TB
subgraph "Cloud (Managed)"
API_H["Hub Backend API"]
DB_H["Database<br/>(Unknown)"]
IGDB["IGDB API<br/>(Game Metadata)"]
end
Hub["Hub App"] -->|"JSON-RPC 2.0<br/>POST /api/rpc"| API_H
Game_S["LCKStreaming<br/>Plugin"] -->|"JSON-RPC 2.0<br/>POST /api/rpc"| API_H
API_H --> DB_H
API_H --> IGDB
style API_H fill:#f96,stroke:#333
```
**Known RPC Methods:**
- `LoginUser`, `RefreshUser` — Auth
- `ListMyStreamingTargets`, `CreateStreamingTarget`, `UpdateStreamingTarget`, `DeleteStreamingTarget` — Targets
- `PairDevice`, `UnpairDevice`, `GetConnectedGames` — Device management
- `StartStreaming`, `StopStreaming` — Stream events
- `SearchIgdbGames` — Game metadata
- `CreateOauthConnectIntent`, `GetOauthConnectIntent` — OAuth
### 9.2 Control Backend (`lck-control-backend`)
```mermaid
graph TB
subgraph "Self-Hosted (Docker on NAS)"
API_C["Fastify 5 API<br/>(TypeScript)"]
Prisma["Prisma 6.4 ORM"]
SQLite["SQLite DB"]
Scheduler["Token Refresh<br/>Scheduler (10min)"]
end
App["Control App"] -->|"REST API<br/>JWT Bearer Auth"| API_C
API_C --> Prisma --> SQLite
Scheduler --> API_C
API_C -->|OAuth| Google["Google OAuth"]
API_C -->|OAuth| Twitch_API["Twitch OAuth"]
API_C -->|Nonce Validate| Meta_Graph["Meta Graph API"]
API_C -->|Live API| YT_API["YouTube Live API"]
API_C -->|Helix API| TW_API["Twitch Helix API"]
style API_C fill:#9cf,stroke:#333
```
**REST Endpoints:**
| Group | Endpoints |
|-------|-----------|
| Auth | `POST /auth/meta/callback`, `POST /auth/refresh`, `GET /auth/me`, `POST /auth/logout` |
| Providers | `GET /providers/accounts`, `GET /providers/{yt\|tw}/auth-url`, `POST /providers/{yt\|tw}/callback`, `DELETE /providers/:serviceId` |
| Streams | `GET /streams/plans`, `POST /streams/plans`, `GET /streams/plans/:id`, `DELETE /streams/plans/:id` |
| Lifecycle | `POST /streams/plans/:id/prepare`, `POST /streams/plans/:id/start`, `POST /streams/plans/:id/end` |
---
## 10. Data Flow Comparison
### 10.1 Hub: Centralized Server Model
```mermaid
graph LR
subgraph "Data Ownership"
direction TB
Server_H["Hub Backend<br/>(owns ALL data)"]
end
Hub_App["Hub App<br/>(thin client)"] <-->|"All state via<br/>JSON-RPC"| Server_H
Game_H["UE5 Game<br/>(paired device)"] <-->|"All state via<br/>JSON-RPC"| Server_H
YT_H["YouTube API"] <--> Server_H
TW_H["Twitch API"] <--> Server_H
style Server_H fill:#f96,stroke:#333
```
- **Single source of truth**: Backend server
- **No local cache**: App relies on network for all state
- **Game is decoupled**: Only communicates with server, never with app
- **Offline = broken**: Cannot function without server connectivity
### 10.2 Control: Distributed Ownership Model
```mermaid
graph LR
subgraph "Data Ownership"
direction TB
Server_C["Control Backend<br/>(tokens, plans,<br/>OAuth)"]
App_C["Control App<br/>(local cache,<br/>session tokens)"]
Game_C["UE5 Game<br/>(RTMP streams)"]
end
App_C <-->|REST API| Server_C
Game_C <-->|"AIDL IPC<br/>(stream plans,<br/>RTMP config)"| App_C
Server_C <--> YT_C["YouTube API"]
Server_C <--> TW_C["Twitch API"]
Game_C -->|"RTMP<br/>(direct)"| CDN["YouTube / Twitch<br/>RTMP Ingest"]
style App_C fill:#9f9,stroke:#333
style Game_C fill:#9cf,stroke:#333
```
- **Distributed state**: Backend (tokens, plans), App (cache, session), Game (streams)
- **Local caching**: Room DB provides offline access to plans and accounts
- **Game is tightly coupled**: Direct IPC with companion app
- **Partial offline**: Can view cached plans without network
---
## 11. Unification Strategy
### 11.1 Recommended Direction: Evolve Control App into Production
The Control architecture is fundamentally superior for game streaming because:
1. **Direct encode > screen capture** — Quality, performance, and battery life
2. **Multi-destination > single target** — Key user-facing feature
3. **IPC > server polling** — Reliability and responsiveness
4. **Stream plans > ad-hoc streaming** — Better UX for recurring setups
5. **Standard Android stack > Rust/WASM** — Easier maintenance and hiring
### 11.2 Migration Roadmap
```mermaid
gantt
title Unification Roadmap
dateFormat YYYY-MM-DD
axisFormat %b %Y
section Phase 1: Production Readiness
CI/CD pipeline (Jenkins/GH Actions) :p1a, 2026-03-01, 14d
Sentry crash reporting :p1b, 2026-03-01, 7d
Backend deploy to cloud :p1c, 2026-03-08, 7d
Certificate pinning (OkHttp) :p1d, 2026-03-08, 3d
section Phase 2: Feature Parity
IGDB game metadata integration :p2a, 2026-03-15, 7d
Watermark / overlay support in encoder :p2b, 2026-03-15, 10d
Subscription model + paywall :p2c, 2026-03-22, 14d
section Phase 3: Hub Migration
Add fallback screen-capture mode :p3a, 2026-04-05, 14d
Port device pairing (for non-integrated games) :p3b, 2026-04-05, 10d
Migrate Hub users to Control :p3c, 2026-04-19, 14d
Deprecate Hub app :p3d, 2026-05-03, 7d
section Phase 4: Polish
Desktop companion (optional) :p4a, 2026-05-10, 21d
Advanced stream analytics :p4b, 2026-05-10, 14d
Store listing + marketing :p4c, 2026-05-24, 7d
```
### 11.3 What to Keep from Each
```mermaid
graph TB
subgraph "Unified App"
direction TB
A["Control App Architecture<br/>(Kotlin + Compose + Hilt)"]
B["Control Backend<br/>(Fastify + Prisma + SQLite)"]
C["LCKControl Plugin<br/>(AIDL + multi-destination)"]
D["Stream Plan System<br/>(DRAFT → READY → LIVE → ENDED)"]
end
subgraph "Adopt from Hub"
E["Sentry Crash Reporting"]
F["IGDB Game Database"]
G["Certificate Pinning"]
H["Jenkins CI/CD"]
I["Watermark Renderer"]
J["Screen Capture Fallback"]
end
subgraph "Discard"
K["Rust/Tauri/Leptos Stack"]
L["JSON-RPC 2.0 Protocol"]
M["minirtmp (Rust RTMP)"]
N["Device Code Pairing<br/>(replaced by AIDL)"]
O["Single-Destination Limit"]
end
E --> A
F --> B
G --> A
H --> A
I --> C
J --> A
style A fill:#9f9,stroke:#333
style B fill:#9cf,stroke:#333
style C fill:#9f9,stroke:#333
style D fill:#9f9,stroke:#333
style K fill:#fbb,stroke:#333
style L fill:#fbb,stroke:#333
style M fill:#fbb,stroke:#333
style N fill:#fbb,stroke:#333
style O fill:#fbb,stroke:#333
```
### 11.4 Hybrid Mode: Screen Capture Fallback
To maintain the Hub's "works with any game" advantage, add a fallback path:
```mermaid
graph TB
Start["Game Launches"] --> Check{"LCKControl Plugin<br/>integrated?"}
Check -->|Yes| AIDL["AIDL IPC Path<br/>(direct encode,<br/>multi-destination)"]
Check -->|No| Capture["Screen Capture Path<br/>(MediaProjection,<br/>single destination)"]
AIDL --> Stream["Stream to Platforms"]
Capture --> Stream
style AIDL fill:#9f9,stroke:#333
style Capture fill:#ff9,stroke:#333
```
This gives the unified app both modes:
- **Primary**: Direct encoding via AIDL (high quality, multi-destination)
- **Fallback**: Screen capture for games without plugin integration (compatibility)
---
## 12. Risk Assessment
| Risk | Impact | Mitigation |
|------|--------|------------|
| Hub users lose access during migration | High | Run both apps in parallel during transition, provide migration guide |
| AIDL only works on Android (no desktop) | Medium | Screen capture fallback for desktop; evaluate PCVR needs later |
| Self-hosted backend scalability | Medium | Move to managed cloud (Railway, Fly.io) before store launch |
| Synchronous JNI blocking causes ANR | Medium | Add timeout handling, move to async callback pattern |
| No subscription model in Control | Low | Implement before store launch using existing Hub billing logic |
| Losing crash telemetry | Low | Add Sentry SDK early in Phase 1 |
---
## 13. Summary Decision Matrix
```mermaid
quadrantChart
title Streaming Quality vs Maintenance Complexity
x-axis Low Maintenance --> High Maintenance
y-axis Low Quality --> High Quality
quadrant-1 Ideal
quadrant-2 Overengineered
quadrant-3 Avoid
quadrant-4 Quick & Dirty
Control App: [0.35, 0.85]
Hub App: [0.75, 0.45]
Unified - Recommended: [0.45, 0.90]
```
**Recommendation**: The Control App architecture with adopted Hub features provides the best path forward — higher streaming quality with a more maintainable stack. The Hub's Rust/Tauri/Leptos stack adds significant complexity without proportional benefits for an Android-focused product.
---
*Document generated 2026-02-26. Based on analysis of `liv-control-center`, `lck-control`, `lck-control-backend`, and `LCKGame` codebases.*

Binary file not shown.

View File

@@ -4,10 +4,14 @@ import android.content.ComponentName
import android.content.Context import android.content.Context
import android.content.Intent import android.content.Intent
import android.content.ServiceConnection import android.content.ServiceConnection
import android.hardware.HardwareBuffer
import android.os.IBinder import android.os.IBinder
import android.os.ParcelFileDescriptor
import com.omixlab.lckcontrol.shared.ConnectedClientInfo import com.omixlab.lckcontrol.shared.ConnectedClientInfo
import com.omixlab.lckcontrol.shared.ILckControlCallback import com.omixlab.lckcontrol.shared.ILckControlCallback
import com.omixlab.lckcontrol.shared.ILckControlService import com.omixlab.lckcontrol.shared.ILckControlService
import com.omixlab.lckcontrol.shared.ILckStreamingCallback
import com.omixlab.lckcontrol.shared.ILckStreamingService
import com.omixlab.lckcontrol.shared.LinkedAccount import com.omixlab.lckcontrol.shared.LinkedAccount
import com.omixlab.lckcontrol.shared.StreamPlan import com.omixlab.lckcontrol.shared.StreamPlan
import com.omixlab.lckcontrol.shared.StreamPlanConfig import com.omixlab.lckcontrol.shared.StreamPlanConfig
@@ -21,9 +25,11 @@ class LckControlClient(private val context: Context) {
private const val SERVICE_PACKAGE = "com.omixlab.lckcontrol" private const val SERVICE_PACKAGE = "com.omixlab.lckcontrol"
private const val SERVICE_CLASS = "$SERVICE_PACKAGE.service.LckControlService" private const val SERVICE_CLASS = "$SERVICE_PACKAGE.service.LckControlService"
private const val PERMISSION = "$SERVICE_PACKAGE.permission.USE_LCK_CONTROL" private const val PERMISSION = "$SERVICE_PACKAGE.permission.USE_LCK_CONTROL"
private const val ACTION_BIND_STREAMING = "$SERVICE_PACKAGE.BIND_STREAMING"
} }
private var service: ILckControlService? = null private var service: ILckControlService? = null
private var streamingService: ILckStreamingService? = null
private var clientId: String? = null private var clientId: String? = null
private val _connected = MutableStateFlow(false) private val _connected = MutableStateFlow(false)
@@ -35,6 +41,12 @@ class LckControlClient(private val context: Context) {
private val _streamPlans = MutableStateFlow<List<StreamPlan>>(emptyList()) private val _streamPlans = MutableStateFlow<List<StreamPlan>>(emptyList())
val streamPlans: StateFlow<List<StreamPlan>> = _streamPlans.asStateFlow() val streamPlans: StateFlow<List<StreamPlan>> = _streamPlans.asStateFlow()
private val _streamingState = MutableStateFlow("IDLE")
val streamingState: StateFlow<String> = _streamingState.asStateFlow()
private val _streamingConnected = MutableStateFlow(false)
val streamingConnected: StateFlow<Boolean> = _streamingConnected.asStateFlow()
private val callback = object : ILckControlCallback.Stub() { private val callback = object : ILckControlCallback.Stub() {
override fun onStreamPlansChanged(plans: List<StreamPlan>) { override fun onStreamPlansChanged(plans: List<StreamPlan>) {
_streamPlans.value = plans _streamPlans.value = plans
@@ -54,6 +66,33 @@ class LckControlClient(private val context: Context) {
} }
} }
private val streamingCallback = object : ILckStreamingCallback.Stub() {
override fun onBufferReleased(bufferIndex: Int) {
onBufferReleasedListener?.invoke(bufferIndex)
}
override fun onStreamingStateChanged(state: String) {
_streamingState.value = state
}
override fun onStreamingError(code: Int, message: String) {
onStreamingErrorListener?.invoke(code, message)
}
override fun onStreamingStats(videoBitrate: Long, audioBitrate: Long, fps: Int, droppedFrames: Int) {
onStreamingStatsListener?.invoke(videoBitrate, audioBitrate, fps, droppedFrames)
}
}
/** Listener for buffer release events (game can reuse the buffer). */
var onBufferReleasedListener: ((Int) -> Unit)? = null
/** Listener for streaming errors. */
var onStreamingErrorListener: ((Int, String) -> Unit)? = null
/** Listener for streaming stats updates. */
var onStreamingStatsListener: ((Long, Long, Int, Int) -> Unit)? = null
private val connection = object : ServiceConnection { private val connection = object : ServiceConnection {
override fun onServiceConnected(name: ComponentName?, binder: IBinder?) { override fun onServiceConnected(name: ComponentName?, binder: IBinder?) {
service = ILckControlService.Stub.asInterface(binder) service = ILckControlService.Stub.asInterface(binder)
@@ -70,6 +109,20 @@ class LckControlClient(private val context: Context) {
} }
} }
private val streamingConnection = object : ServiceConnection {
override fun onServiceConnected(name: ComponentName?, binder: IBinder?) {
streamingService = ILckStreamingService.Stub.asInterface(binder)
streamingService?.registerStreamingCallback(streamingCallback)
_streamingConnected.value = true
}
override fun onServiceDisconnected(name: ComponentName?) {
streamingService = null
_streamingConnected.value = false
_streamingState.value = "IDLE"
}
}
fun bind(): Boolean { fun bind(): Boolean {
val intent = Intent().apply { val intent = Intent().apply {
component = ComponentName(SERVICE_PACKAGE, SERVICE_CLASS) component = ComponentName(SERVICE_PACKAGE, SERVICE_CLASS)
@@ -93,6 +146,51 @@ class LckControlClient(private val context: Context) {
_authenticated.value = false _authenticated.value = false
} }
// ── Streaming service ────────────────────────────────
fun bindStreaming(): Boolean {
val intent = Intent(ACTION_BIND_STREAMING).apply {
component = ComponentName(SERVICE_PACKAGE, SERVICE_CLASS)
}
return context.bindService(intent, streamingConnection, Context.BIND_AUTO_CREATE)
}
fun unbindStreaming() {
streamingService?.let { svc ->
svc.unregisterStreamingCallback(streamingCallback)
}
try {
context.unbindService(streamingConnection)
} catch (_: IllegalArgumentException) {}
streamingService = null
_streamingConnected.value = false
_streamingState.value = "IDLE"
}
// ── Texture pool ─────────────────────────────────────
fun registerTexturePool(buffers: Array<HardwareBuffer>, width: Int, height: Int, format: Int) {
streamingService?.registerTexturePool(buffers, width, height, format)
}
fun unregisterTexturePool() {
streamingService?.unregisterTexturePool()
}
// ── Frame submission (called from game render thread) ──
fun submitVideoFrame(bufferIndex: Int, timestampNs: Long, gpuFenceFd: ParcelFileDescriptor?) {
streamingService?.submitVideoFrame(bufferIndex, timestampNs, gpuFenceFd)
}
fun submitAudioFrame(pcmData: ByteArray, timestampNs: Long, sampleRate: Int, channels: Int, bitsPerSample: Int) {
streamingService?.submitAudioFrame(pcmData, timestampNs, sampleRate, channels, bitsPerSample)
}
fun isStreaming(): Boolean {
return streamingService?.isStreaming ?: false
}
// ── Auth ──────────────────────────────────────────── // ── Auth ────────────────────────────────────────────
fun isAuthenticated(): Boolean { fun isAuthenticated(): Boolean {

View File

@@ -0,0 +1,8 @@
package com.omixlab.lckcontrol.shared;

/**
 * Events pushed from the streaming service back to the bound game client.
 *
 * All methods are declared {@code oneway}: Binder dispatches them
 * asynchronously, so the service never blocks on a slow client while
 * delivering per-frame or per-second notifications.
 */
interface ILckStreamingCallback {
    /** The service has finished with this texture-pool buffer; the game may reuse it. */
    oneway void onBufferReleased(int bufferIndex);

    /** Streaming state transition, reported as a string tag (e.g. "IDLE"). */
    oneway void onStreamingStateChanged(String state);

    /** A streaming failure, identified by a numeric code plus a human-readable message. */
    oneway void onStreamingError(int code, String message);

    /** Periodic transport statistics: bitrates in bits/s, encoder fps, and dropped-frame count. */
    oneway void onStreamingStats(long videoBitrate, long audioBitrate, int fps, int droppedFrames);
}

View File

@@ -0,0 +1,22 @@
package com.omixlab.lckcontrol.shared;

import android.hardware.HardwareBuffer;
import android.os.ParcelFileDescriptor;

import com.omixlab.lckcontrol.shared.ILckStreamingCallback;

/**
 * Streaming IPC surface exposed by the companion app and bound by the game.
 *
 * The game allocates a pool of HardwareBuffers, registers it once, then
 * submits frames by pool index so no pixel data crosses the Binder boundary.
 */
interface ILckStreamingService {
    // Texture pool (game allocates, app receives)

    /** Registers the shared buffer pool; width/height/format describe every buffer in it. */
    void registerTexturePool(in HardwareBuffer[] buffers, int width, int height, int format);

    /** Releases the previously registered pool; buffer indices become invalid. */
    void unregisterTexturePool();

    // Frame submission (game -> app, one-way for performance)

    /**
     * Submits one video frame by pool index. gpuFence, when non-null, is a fence fd
     * the service must wait on before reading the buffer; timestampNs is the
     * presentation timestamp in nanoseconds.
     */
    oneway void submitVideoFrame(int bufferIndex, long timestampNs, in ParcelFileDescriptor gpuFence);

    /** Submits raw PCM audio with its format description and presentation timestamp. */
    oneway void submitAudioFrame(in byte[] pcmData, long timestampNs, int sampleRate, int channels, int bitsPerSample);

    // Streaming lifecycle

    /** True while a stream is actively being published. */
    boolean isStreaming();

    // Callbacks

    /** Registers a callback for buffer-release, state, error, and stats events. */
    void registerStreamingCallback(ILckStreamingCallback callback);

    /** Unregisters a previously registered callback; unknown callbacks are ignored. */
    void unregisterStreamingCallback(ILckStreamingCallback callback);
}

View File

@@ -0,0 +1,3 @@
package com.omixlab.lckcontrol.shared;

// Declares StreamingConfig as an AIDL parcelable whose implementation lives in
// the Kotlin class com.omixlab.lckcontrol.shared.StreamingConfig, allowing it
// to be passed across the Binder interface.
parcelable StreamingConfig;

View File

@@ -10,6 +10,7 @@ data class LinkedAccount(
val accountId: String, val accountId: String,
val avatarUrl: String? = null, val avatarUrl: String? = null,
val isAuthenticated: Boolean = false, val isAuthenticated: Boolean = false,
val isEnabled: Boolean = true,
) : Parcelable { ) : Parcelable {
constructor(parcel: Parcel) : this( constructor(parcel: Parcel) : this(
@@ -19,6 +20,7 @@ data class LinkedAccount(
accountId = parcel.readString()!!, accountId = parcel.readString()!!,
avatarUrl = parcel.readString(), avatarUrl = parcel.readString(),
isAuthenticated = parcel.readInt() != 0, isAuthenticated = parcel.readInt() != 0,
isEnabled = if (parcel.dataAvail() > 0) parcel.readInt() != 0 else true,
) )
override fun writeToParcel(parcel: Parcel, flags: Int) { override fun writeToParcel(parcel: Parcel, flags: Int) {
@@ -28,6 +30,7 @@ data class LinkedAccount(
parcel.writeString(accountId) parcel.writeString(accountId)
parcel.writeString(avatarUrl) parcel.writeString(avatarUrl)
parcel.writeInt(if (isAuthenticated) 1 else 0) parcel.writeInt(if (isAuthenticated) 1 else 0)
parcel.writeInt(if (isEnabled) 1 else 0)
} }
override fun describeContents(): Int = 0 override fun describeContents(): Int = 0

View File

@@ -8,6 +8,8 @@ data class StreamPlan(
val name: String, val name: String,
val status: String = "DRAFT", val status: String = "DRAFT",
val destinations: List<StreamDestination> = emptyList(), val destinations: List<StreamDestination> = emptyList(),
val executionMode: String = "IN_GAME",
val gameId: String = "",
) : Parcelable { ) : Parcelable {
constructor(parcel: Parcel) : this( constructor(parcel: Parcel) : this(
@@ -15,6 +17,8 @@ data class StreamPlan(
name = parcel.readString()!!, name = parcel.readString()!!,
status = parcel.readString() ?: "DRAFT", status = parcel.readString() ?: "DRAFT",
destinations = parcel.createTypedArrayList(StreamDestination.CREATOR) ?: emptyList(), destinations = parcel.createTypedArrayList(StreamDestination.CREATOR) ?: emptyList(),
executionMode = if (parcel.dataAvail() > 0) parcel.readString() ?: "IN_GAME" else "IN_GAME",
gameId = if (parcel.dataAvail() > 0) parcel.readString() ?: "" else "",
) )
override fun writeToParcel(parcel: Parcel, flags: Int) { override fun writeToParcel(parcel: Parcel, flags: Int) {
@@ -22,6 +26,8 @@ data class StreamPlan(
parcel.writeString(name) parcel.writeString(name)
parcel.writeString(status) parcel.writeString(status)
parcel.writeTypedList(destinations) parcel.writeTypedList(destinations)
parcel.writeString(executionMode)
parcel.writeString(gameId)
} }
override fun describeContents(): Int = 0 override fun describeContents(): Int = 0

View File

@@ -6,16 +6,22 @@ import android.os.Parcelable
data class StreamPlanConfig( data class StreamPlanConfig(
val name: String, val name: String,
val destinations: List<StreamDestination> = emptyList(), val destinations: List<StreamDestination> = emptyList(),
val executionMode: String = "IN_GAME",
val gameId: String = "",
) : Parcelable { ) : Parcelable {
constructor(parcel: Parcel) : this( constructor(parcel: Parcel) : this(
name = parcel.readString()!!, name = parcel.readString()!!,
destinations = parcel.createTypedArrayList(StreamDestination.CREATOR) ?: emptyList(), destinations = parcel.createTypedArrayList(StreamDestination.CREATOR) ?: emptyList(),
executionMode = if (parcel.dataAvail() > 0) parcel.readString() ?: "IN_GAME" else "IN_GAME",
gameId = if (parcel.dataAvail() > 0) parcel.readString() ?: "" else "",
) )
override fun writeToParcel(parcel: Parcel, flags: Int) { override fun writeToParcel(parcel: Parcel, flags: Int) {
parcel.writeString(name) parcel.writeString(name)
parcel.writeTypedList(destinations) parcel.writeTypedList(destinations)
parcel.writeString(executionMode)
parcel.writeString(gameId)
} }
override fun describeContents(): Int = 0 override fun describeContents(): Int = 0

View File

@@ -0,0 +1,39 @@
package com.omixlab.lckcontrol.shared
import android.os.Parcel
import android.os.Parcelable
/**
 * Encoder parameters for a streaming session, passed across the AIDL boundary.
 *
 * Defaults: 6 Mbps H.264 video, 128 kbps stereo AAC-style PCM input at 48 kHz,
 * and a keyframe every 2 (units per encoder convention — presumably seconds;
 * NOTE(review): confirm against the native encoder).
 *
 * Parcel layout is positional: [writeToParcel] and the [Parcel] constructor
 * must read and write fields in exactly the declaration order below.
 */
data class StreamingConfig(
    val videoBitrate: Int = 6_000_000,
    val videoCodec: String = "h264",
    val audioBitrate: Int = 128_000,
    val audioSampleRate: Int = 48_000,
    val audioChannels: Int = 2,
    val keyFrameInterval: Int = 2,
) : Parcelable {

    /** Reconstructs an instance from [parcel]; read order mirrors [writeToParcel]. */
    constructor(parcel: Parcel) : this(
        parcel.readInt(),                   // videoBitrate
        parcel.readString() ?: "h264",      // videoCodec (defensive default for null)
        parcel.readInt(),                   // audioBitrate
        parcel.readInt(),                   // audioSampleRate
        parcel.readInt(),                   // audioChannels
        parcel.readInt(),                   // keyFrameInterval
    )

    override fun writeToParcel(parcel: Parcel, flags: Int) = with(parcel) {
        writeInt(videoBitrate)
        writeString(videoCodec)
        writeInt(audioBitrate)
        writeInt(audioSampleRate)
        writeInt(audioChannels)
        writeInt(keyFrameInterval)
    }

    override fun describeContents(): Int = 0

    companion object CREATOR : Parcelable.Creator<StreamingConfig> {
        override fun createFromParcel(source: Parcel): StreamingConfig = StreamingConfig(source)

        override fun newArray(size: Int): Array<StreamingConfig?> = arrayOfNulls(size)
    }
}
}