App streaming pipeline, dashboard server status, account enable/disable, game-linked plans

- Add C++ native streaming engine (RTMP client, EGL context, streaming engine, JNI bridge)
- Add pre-built arm64-v8a libs (librtmp, libssl, libcrypto, libz) and headers
- Add Kotlin streaming layer (NativeStreamingEngine, StreamingManager, StreamingStats)
- Add AIDL streaming interface (ILckStreamingService, ILckStreamingCallback, StreamingConfig)
- Add LckStreamingServiceImpl with BIND_STREAMING action support
- Add APP_STREAMING execution mode with auto-start/stop on plan lifecycle
- SDK: add bindStreaming(), submitVideoFrame(), submitAudioFrame() to LckControlClient
- Dashboard: replace linked accounts with server status card, move health polling from nav
- Remove health check dot overlay from Dashboard nav icon
- Accounts: add enable/disable toggle per account (persists locally, excluded from default plans)
- Plans: add gameId field linked to game package ID, resolved from ClientTracker for default plans
- Service: pass executionMode+gameId through createStreamPlan, filter enabled accounts in createDefaultPlan
- Room DB migration 4→5: add isEnabled column to linked_accounts, gameId column to stream_plans
- Add docs (hub vs control comparison)
This commit is contained in:
2026-02-28 20:05:21 +01:00
parent 1480a2944b
commit 097cd24ea9
59 changed files with 13609 additions and 89 deletions

View File

@@ -60,9 +60,20 @@ android {
buildConfigField("String", "DISPLAY_VERSION", "\"${gitDisplayVersion()}\"")
ndk {
abiFilters += listOf("arm64-v8a")
}
testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner"
}
externalNativeBuild {
cmake {
path = file("src/main/cpp/CMakeLists.txt")
version = "3.22.1"
}
}
buildTypes {
debug {
signingConfig = signingConfigs.getByName("release")

View File

@@ -69,6 +69,9 @@
<intent-filter>
<action android:name="com.omixlab.lckcontrol.BIND" />
</intent-filter>
<intent-filter>
<action android:name="com.omixlab.lckcontrol.BIND_STREAMING" />
</intent-filter>
</service>
</application>

View File

@@ -0,0 +1,49 @@
# Build script for the native streaming engine: JNI bridge + RTMP pipeline
# + EGL/GLES blit path. Produces liblck_streaming.so for arm64-v8a.
cmake_minimum_required(VERSION 3.22.1)
project(lck_streaming)
# NDK system libraries (resolved from the toolchain sysroot).
find_library(log-lib log)
find_library(android-lib android)
find_library(mediandk-lib mediandk)
find_library(egl-lib EGL)
find_library(glesv3-lib GLESv3)
find_library(nativewindow-lib nativewindow)
add_library(lck_streaming SHARED
jni_bridge.cpp
rtmp_client.cpp
rtmp_sink.cpp
egl_context.cpp
streaming_engine.cpp
)
# librtmp headers shipped alongside the pre-built binaries.
target_include_directories(lck_streaming PRIVATE
${CMAKE_SOURCE_DIR}/third_party/librtmp/include
)
# Import pre-built librtmp from jniLibs
# (libssl/libcrypto are librtmp's TLS dependencies; Gradle packages the .so
# files from jniLibs/ into the APK, so only linking is needed here).
add_library(rtmp SHARED IMPORTED)
set_target_properties(rtmp PROPERTIES
IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/../jniLibs/arm64-v8a/librtmp.so
)
add_library(ssl SHARED IMPORTED)
set_target_properties(ssl PROPERTIES
IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/../jniLibs/arm64-v8a/libssl.so
)
add_library(crypto SHARED IMPORTED)
set_target_properties(crypto PROPERTIES
IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/../jniLibs/arm64-v8a/libcrypto.so
)
# NOTE(review): a libz.so is also shipped in jniLibs but is not linked here —
# presumably librtmp loads it transitively; confirm it is packaged.
target_link_libraries(lck_streaming
${log-lib}
${android-lib}
${mediandk-lib}
${egl-lib}
${glesv3-lib}
${nativewindow-lib}
rtmp
ssl
crypto
)

View File

@@ -0,0 +1,219 @@
#include "egl_context.h"

#include <android/log.h>
#include <cerrno>
#include <poll.h>
#include <unistd.h>
#define TAG "LckEglContext"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
#ifndef EGL_NATIVE_BUFFER_ANDROID
#define EGL_NATIVE_BUFFER_ANDROID 0x3140
#endif
#ifndef EGL_SYNC_NATIVE_FENCE_ANDROID
#define EGL_SYNC_NATIVE_FENCE_ANDROID 0x3144
#endif
#ifndef EGL_SYNC_NATIVE_FENCE_FD_ANDROID
#define EGL_SYNC_NATIVE_FENCE_FD_ANDROID 0x3145
#endif
#ifndef EGL_RECORDABLE_ANDROID
#define EGL_RECORDABLE_ANDROID 0x3142
#endif
// Construction is trivial; all EGL state is created lazily in Init().
EglContext::EglContext() = default;

// Guarantee teardown even when the owner forgets to call Release().
EglContext::~EglContext() {
    Release();
}
/**
 * Resolve extension entry points at runtime via eglGetProcAddress.
 * Pointers stay null for unsupported extensions; only the four needed for
 * HardwareBuffer import are mandatory. The fence-sync and presentation-time
 * entry points are optional (their call sites null-check them).
 */
bool EglContext::LoadExtensions() {
    eglCreateSyncKHR = (PFNEGLCREATESYNCKHRPROC)eglGetProcAddress("eglCreateSyncKHR");
    eglWaitSyncKHR = (PFNEGLWAITSYNCKHRPROC)eglGetProcAddress("eglWaitSyncKHR");
    eglDestroySyncKHR = (PFNEGLDESTROYSYNCKHRPROC)eglGetProcAddress("eglDestroySyncKHR");
    eglGetNativeClientBufferANDROID = (PFNEGLGETNATIVECLIENTBUFFERANDROIDPROC)eglGetProcAddress("eglGetNativeClientBufferANDROID");
    eglCreateImageKHR = (PFNEGLCREATEIMAGEKHRPROC)eglGetProcAddress("eglCreateImageKHR");
    eglDestroyImageKHR = (PFNEGLDESTROYIMAGEKHRPROC)eglGetProcAddress("eglDestroyImageKHR");
    glEGLImageTargetTexture2DOES = (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC)eglGetProcAddress("glEGLImageTargetTexture2DOES");
    eglPresentationTimeANDROID = (PFNEGLPRESENTATIONTIMEANDROIDPROC)eglGetProcAddress("eglPresentationTimeANDROID");
    // These four are required to turn an AHardwareBuffer into a GL texture.
    if (!eglGetNativeClientBufferANDROID || !eglCreateImageKHR ||
        !eglDestroyImageKHR || !glEGLImageTargetTexture2DOES) {
        LOGE("Missing required EGL extensions for HardwareBuffer import");
        return false;
    }
    return true;
}
/**
 * Initialize EGL: default display, an RGBA8/ES3 recordable config, and a
 * context. Must succeed before any other method is called. Returns true on
 * success; any partially created state is cleaned up by Release() later.
 */
bool EglContext::Init() {
    display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (display == EGL_NO_DISPLAY) {
        LOGE("eglGetDisplay failed");
        return false;
    }
    EGLint major, minor;
    if (!eglInitialize(display, &major, &minor)) {
        LOGE("eglInitialize failed");
        return false;
    }
    LOGI("EGL initialized: %d.%d", major, minor);
    // EGL config: RGBA8, ES3, recordable for MediaCodec
    // (EGL_RECORDABLE_ANDROID is what allows the window surface to feed a
    // MediaCodec encoder input surface.)
    EGLint configAttribs[] = {
        EGL_RENDERABLE_TYPE, EGL_OPENGL_ES3_BIT,
        EGL_RED_SIZE, 8,
        EGL_GREEN_SIZE, 8,
        EGL_BLUE_SIZE, 8,
        EGL_ALPHA_SIZE, 8,
        EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
        EGL_RECORDABLE_ANDROID, EGL_TRUE,
        EGL_NONE
    };
    EGLint numConfigs;
    if (!eglChooseConfig(display, configAttribs, &config, 1, &numConfigs) || numConfigs == 0) {
        LOGE("eglChooseConfig failed");
        return false;
    }
    EGLint contextAttribs[] = {
        EGL_CONTEXT_CLIENT_VERSION, 3,
        EGL_NONE
    };
    // No share context: this engine owns its textures exclusively.
    context = eglCreateContext(display, config, EGL_NO_CONTEXT, contextAttribs);
    if (context == EGL_NO_CONTEXT) {
        LOGE("eglCreateContext failed");
        return false;
    }
    if (!LoadExtensions()) {
        return false;
    }
    LOGI("EGL context created successfully");
    return true;
}
/**
 * Attach the context to an encoder input window (typically the ANativeWindow
 * wrapped around MediaCodec's input Surface). Replaces any prior surface.
 */
bool EglContext::CreateWindowSurface(ANativeWindow* window) {
    // Drop the surface left over from a previous encoder session, if any.
    if (surface != EGL_NO_SURFACE) {
        eglDestroySurface(display, surface);
    }
    surface = eglCreateWindowSurface(display, config, window, nullptr);
    if (surface == EGL_NO_SURFACE) {
        LOGE("eglCreateWindowSurface failed: 0x%x", eglGetError());
        return false;
    }
    // Cache the surface dimensions for later viewport/blit setup.
    eglQuerySurface(display, surface, EGL_WIDTH, &surfaceWidth);
    eglQuerySurface(display, surface, EGL_HEIGHT, &surfaceHeight);
    LOGI("EGL window surface created: %dx%d", surfaceWidth, surfaceHeight);
    return true;
}
/**
 * Wrap an AHardwareBuffer as a GL_TEXTURE_EXTERNAL_OES texture through the
 * EGLClientBuffer → EGLImage path. Returns the texture id, or 0 on failure.
 * Leaves the new texture bound to GL_TEXTURE_EXTERNAL_OES on this thread.
 */
GLuint EglContext::ImportHardwareBuffer(AHardwareBuffer* buffer) {
    if (!eglGetNativeClientBufferANDROID || !eglCreateImageKHR || !glEGLImageTargetTexture2DOES) {
        LOGE("Missing EGL extensions for HardwareBuffer import");
        return 0;
    }
    EGLClientBuffer clientBuffer = eglGetNativeClientBufferANDROID(buffer);
    if (!clientBuffer) {
        LOGE("eglGetNativeClientBufferANDROID failed");
        return 0;
    }
    EGLint imageAttribs[] = {
        EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
        EGL_NONE
    };
    EGLImageKHR image = eglCreateImageKHR(display, EGL_NO_CONTEXT,
                                          EGL_NATIVE_BUFFER_ANDROID,
                                          clientBuffer, imageAttribs);
    if (image == EGL_NO_IMAGE_KHR) {
        LOGE("eglCreateImageKHR failed: 0x%x", eglGetError());
        return 0;
    }
    GLuint textureId;
    glGenTextures(1, &textureId);
    glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId);
    glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, image);
    // Destroying the EGLImage right away is legal: the texture above is an
    // "EGLImage sibling" and keeps the underlying buffer storage alive
    // (EGL_KHR_image_base). Callers therefore only need to delete the
    // texture, e.g. ReleaseImportedTexture(textureId, EGL_NO_IMAGE_KHR).
    eglDestroyImageKHR(display, image);
    return textureId;
}
/**
 * Release a texture created by ImportHardwareBuffer and, when the caller kept
 * one, the EGLImage it was a sibling of. Either argument may be "empty"
 * (0 / EGL_NO_IMAGE_KHR) and is then ignored.
 */
void EglContext::ReleaseImportedTexture(GLuint textureId, EGLImageKHR image) {
    if (textureId != 0) {
        glDeleteTextures(1, &textureId);
    }
    if (eglDestroyImageKHR && image != EGL_NO_IMAGE_KHR) {
        eglDestroyImageKHR(display, image);
    }
}
void EglContext::WaitFence(int fenceFd) {
if (fenceFd < 0) return;
if (eglCreateSyncKHR && eglWaitSyncKHR && eglDestroySyncKHR) {
EGLint attribs[] = {
EGL_SYNC_NATIVE_FENCE_FD_ANDROID, fenceFd,
EGL_NONE
};
EGLSyncKHR sync = eglCreateSyncKHR(display, EGL_SYNC_NATIVE_FENCE_ANDROID, attribs);
if (sync != EGL_NO_SYNC_KHR) {
// GPU-side wait — doesn't block CPU
eglWaitSyncKHR(display, sync, 0);
eglDestroySyncKHR(display, sync);
// eglCreateSyncKHR takes ownership of fenceFd
return;
}
}
// Fallback: CPU-side wait
close(fenceFd);
}
/**
 * Forward the frame's capture timestamp to the swapchain so MediaCodec can
 * stamp the encoded frame. No-op when the extension or surface is missing.
 */
void EglContext::SetPresentationTime(int64_t timestampNs) {
    if (eglPresentationTimeANDROID && surface != EGL_NO_SURFACE) {
        eglPresentationTimeANDROID(display, surface, timestampNs);
    }
}
/** Bind context + window surface to the calling thread. */
bool EglContext::MakeCurrent() {
    return eglMakeCurrent(display, surface, surface, context) == EGL_TRUE;
}
/** Queue the rendered frame to the encoder's input surface. */
bool EglContext::SwapBuffers() {
    return eglSwapBuffers(display, surface) == EGL_TRUE;
}
/**
 * Idempotent teardown of all EGL objects: unbind, destroy surface and
 * context, then terminate the display. Safe to call repeatedly (the
 * destructor also calls it).
 */
void EglContext::Release() {
    if (display != EGL_NO_DISPLAY) {
        // Unbind before destroying — destroying a current surface/context is
        // an error on some drivers.
        eglMakeCurrent(display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
        if (surface != EGL_NO_SURFACE) {
            eglDestroySurface(display, surface);
            surface = EGL_NO_SURFACE;
        }
        if (context != EGL_NO_CONTEXT) {
            eglDestroyContext(display, context);
            context = EGL_NO_CONTEXT;
        }
        eglTerminate(display);
        display = EGL_NO_DISPLAY;
    }
    LOGI("EGL resources released");
}

View File

@@ -0,0 +1,69 @@
#pragma once
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES3/gl3.h>
#include <GLES2/gl2ext.h>
#include <android/hardware_buffer.h>
/**
 * EGL context for importing HardwareBuffers and blitting to encoder Surface.
 * Handles EGL setup, HardwareBuffer→EGLImage→texture import, and fence sync.
 * Not thread-safe: intended for use from the single encoder/GL thread that
 * called Init().
 */
class EglContext {
public:
    EglContext();
    ~EglContext();
    /** Initialize EGL with a recordable config. Returns true on success. */
    bool Init();
    /** Create a window surface from an ANativeWindow (encoder input surface). */
    bool CreateWindowSurface(ANativeWindow* window);
    /** Import a HardwareBuffer as a GL texture. Returns texture ID (0 on failure). */
    GLuint ImportHardwareBuffer(AHardwareBuffer* buffer);
    /** Release a previously imported HardwareBuffer texture. */
    void ReleaseImportedTexture(GLuint textureId, EGLImageKHR image);
    /** Wait on a native GPU fence FD. Takes ownership of the FD. */
    void WaitFence(int fenceFd);
    /** Set presentation time on the current surface. */
    void SetPresentationTime(int64_t timestampNs);
    /** Make the window surface current. */
    bool MakeCurrent();
    /** Swap buffers on the window surface. */
    bool SwapBuffers();
    /** Release all EGL resources. */
    void Release();
    EGLDisplay GetDisplay() const { return display; }
    int GetWidth() const { return surfaceWidth; }
    int GetHeight() const { return surfaceHeight; }
private:
    // Core EGL objects, created by Init()/CreateWindowSurface(), torn down
    // by Release().
    EGLDisplay display = EGL_NO_DISPLAY;
    EGLContext context = EGL_NO_CONTEXT;
    EGLSurface surface = EGL_NO_SURFACE;
    EGLConfig config = nullptr;
    // Cached dimensions of the current window surface (0 until created).
    int surfaceWidth = 0;
    int surfaceHeight = 0;
    // Extension function pointers
    // (resolved by LoadExtensions(); optional ones may stay null).
    PFNEGLCREATESYNCKHRPROC eglCreateSyncKHR = nullptr;
    PFNEGLWAITSYNCKHRPROC eglWaitSyncKHR = nullptr;
    PFNEGLDESTROYSYNCKHRPROC eglDestroySyncKHR = nullptr;
    PFNEGLGETNATIVECLIENTBUFFERANDROIDPROC eglGetNativeClientBufferANDROID = nullptr;
    PFNEGLCREATEIMAGEKHRPROC eglCreateImageKHR = nullptr;
    PFNEGLDESTROYIMAGEKHRPROC eglDestroyImageKHR = nullptr;
    PFNGLEGLIMAGETARGETTEXTURE2DOESPROC glEGLImageTargetTexture2DOES = nullptr;
    PFNEGLPRESENTATIONTIMEANDROIDPROC eglPresentationTimeANDROID = nullptr;
    bool LoadExtensions();
};

View File

@@ -0,0 +1,162 @@
#include "streaming_engine.h"

#include <android/hardware_buffer_jni.h>
#include <android/log.h>
#include <jni.h>
#include <unistd.h>
#define TAG "LckJniBridge"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
// Process-wide JavaVM, cached at load time so native engine threads can
// attach themselves and call back into Kotlin.
static JavaVM* gJavaVM = nullptr;
// Cache for callback method IDs
// NOTE(review): these are file-scope and (re)written by nativeCreate, so they
// are shared by every engine instance — fine for a single engine, racy if
// several engines are created concurrently. Confirm single-instance usage.
static jmethodID gOnStatsMethod = nullptr;
static jmethodID gOnErrorMethod = nullptr;
static jmethodID gOnBufferReleasedMethod = nullptr;
// Standard JNI entry point; stores the VM and declares the JNI version this
// library was written against.
JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved) {
    gJavaVM = vm;
    return JNI_VERSION_1_6;
}
extern "C" {
/**
 * Create and configure a native StreamingEngine, wire its stats/error/
 * buffer-released callbacks back to the Kotlin NativeStreamingEngine
 * instance, and return the engine pointer as a jlong handle.
 *
 * NOTE(review): the NewGlobalRef created here is captured by the three
 * lambdas but never released (nativeDestroy does not DeleteGlobalRef), so
 * the Kotlin object leaks per engine. Threads attached via
 * AttachCurrentThread in the callbacks are also never detached — confirm the
 * engine's callback threads are long-lived.
 */
JNIEXPORT jlong JNICALL
Java_com_omixlab_lckcontrol_streaming_NativeStreamingEngine_nativeCreate(
    JNIEnv* env, jobject thiz,
    jint width, jint height,
    jint videoBitrate, jint audioBitrate,
    jint sampleRate, jint channels,
    jint keyframeInterval) {
    auto* engine = new StreamingEngine();
    engine->Configure(width, height, videoBitrate, audioBitrate,
                      sampleRate, channels, keyframeInterval);
    // Set up callbacks that call back into Kotlin
    jobject globalRef = env->NewGlobalRef(thiz);
    // Cache method IDs
    jclass cls = env->GetObjectClass(thiz);
    gOnStatsMethod = env->GetMethodID(cls, "onNativeStats", "(JJII)V");
    gOnErrorMethod = env->GetMethodID(cls, "onNativeError", "(ILjava/lang/String;)V");
    gOnBufferReleasedMethod = env->GetMethodID(cls, "onNativeBufferReleased", "(I)V");
    // Each callback may fire on an engine-owned thread, so attach it to the
    // VM on demand before touching JNI.
    engine->SetStatsCallback([globalRef](const StreamingStats& stats) {
        JNIEnv* env;
        if (gJavaVM->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
            if (gJavaVM->AttachCurrentThread(&env, nullptr) != JNI_OK) return;
        }
        if (gOnStatsMethod) {
            env->CallVoidMethod(globalRef, gOnStatsMethod,
                                (jlong)stats.videoBitrate, (jlong)stats.audioBitrate,
                                (jint)stats.fps, (jint)stats.droppedFrames);
        }
    });
    engine->SetErrorCallback([globalRef](int code, const std::string& message) {
        JNIEnv* env;
        if (gJavaVM->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
            if (gJavaVM->AttachCurrentThread(&env, nullptr) != JNI_OK) return;
        }
        if (gOnErrorMethod) {
            jstring msg = env->NewStringUTF(message.c_str());
            env->CallVoidMethod(globalRef, gOnErrorMethod, (jint)code, msg);
            env->DeleteLocalRef(msg);
        }
    });
    engine->SetBufferReleasedCallback([globalRef](int bufferIndex) {
        JNIEnv* env;
        if (gJavaVM->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
            if (gJavaVM->AttachCurrentThread(&env, nullptr) != JNI_OK) return;
        }
        if (gOnBufferReleasedMethod) {
            env->CallVoidMethod(globalRef, gOnBufferReleasedMethod, (jint)bufferIndex);
        }
    });
    LOGI("Native engine created: %dx%d", width, height);
    return reinterpret_cast<jlong>(engine);
}
/**
 * Register an RTMP destination URL on the engine.
 * @return the destination index, or -1 when the handle/URL is invalid.
 *
 * Fix: guard against a null jstring and against GetStringUTFChars returning
 * nullptr on OOM (in which case a Java exception is already pending and we
 * must not dereference the result).
 */
JNIEXPORT jint JNICALL
Java_com_omixlab_lckcontrol_streaming_NativeStreamingEngine_nativeAddDestination(
    JNIEnv* env, jobject thiz, jlong ptr, jstring rtmpUrl) {
    auto* engine = reinterpret_cast<StreamingEngine*>(ptr);
    if (!engine || !rtmpUrl) return -1;
    const char* url = env->GetStringUTFChars(rtmpUrl, nullptr);
    if (!url) return -1;  // OOM — OutOfMemoryError is pending in Java
    int index = engine->AddDestination(url);
    env->ReleaseStringUTFChars(rtmpUrl, url);
    return index;
}
/** Start encoding/streaming; returns JNI_TRUE on success. */
JNIEXPORT jboolean JNICALL
Java_com_omixlab_lckcontrol_streaming_NativeStreamingEngine_nativeStart(
    JNIEnv* env, jobject thiz, jlong ptr) {
    auto* engine = reinterpret_cast<StreamingEngine*>(ptr);
    if (engine == nullptr) {
        return JNI_FALSE;
    }
    return engine->Start() ? JNI_TRUE : JNI_FALSE;
}
/**
 * Submit one video frame as a HardwareBuffer plus acquire fence.
 * Ownership of fenceFd transfers to native code on every path.
 *
 * Fix: the early-return paths previously leaked fenceFd — repeated failures
 * would exhaust the process fd table. Close it whenever the frame cannot be
 * handed to the engine.
 */
JNIEXPORT void JNICALL
Java_com_omixlab_lckcontrol_streaming_NativeStreamingEngine_nativeSubmitVideoFrame(
    JNIEnv* env, jobject thiz, jlong ptr,
    jobject hardwareBuffer, jlong timestampNs, jint fenceFd) {
    auto* engine = reinterpret_cast<StreamingEngine*>(ptr);
    if (!engine) {
        if (fenceFd >= 0) close(fenceFd);
        return;
    }
    AHardwareBuffer* buffer = AHardwareBuffer_fromHardwareBuffer(env, hardwareBuffer);
    if (!buffer) {
        LOGE("Failed to get AHardwareBuffer from Java HardwareBuffer");
        if (fenceFd >= 0) close(fenceFd);
        return;
    }
    // Engine takes over buffer + fence from here.
    engine->SubmitVideoFrame(buffer, timestampNs, fenceFd);
}
/**
 * Submit one buffer of interleaved PCM audio.
 * Uses JNI_ABORT on release because the native side never mutates the array.
 *
 * Fix: guard against a null jbyteArray (GetArrayLength on null crashes the
 * process) and against GetByteArrayElements returning nullptr on OOM.
 */
JNIEXPORT void JNICALL
Java_com_omixlab_lckcontrol_streaming_NativeStreamingEngine_nativeSubmitAudioFrame(
    JNIEnv* env, jobject thiz, jlong ptr,
    jbyteArray pcmData, jlong timestampNs) {
    auto* engine = reinterpret_cast<StreamingEngine*>(ptr);
    if (!engine || !pcmData) return;
    jsize len = env->GetArrayLength(pcmData);
    if (len <= 0) return;  // nothing to encode
    jbyte* data = env->GetByteArrayElements(pcmData, nullptr);
    if (!data) return;  // OOM — exception pending in Java
    engine->SubmitAudioFrame(reinterpret_cast<const uint8_t*>(data), len, timestampNs);
    env->ReleaseByteArrayElements(pcmData, data, JNI_ABORT);
}
/** Stop streaming; safe to call on an already-stopped engine. */
JNIEXPORT void JNICALL
Java_com_omixlab_lckcontrol_streaming_NativeStreamingEngine_nativeStop(
    JNIEnv* env, jobject thiz, jlong ptr) {
    auto* engine = reinterpret_cast<StreamingEngine*>(ptr);
    if (!engine) return;
    engine->Stop();
}
/**
 * Stop and delete the engine created by nativeCreate. The jlong handle must
 * not be used afterwards.
 * NOTE(review): the global reference captured by the callbacks in
 * nativeCreate is not released here (no DeleteGlobalRef), so the Kotlin
 * wrapper object leaks — confirm and fix alongside nativeCreate.
 */
JNIEXPORT void JNICALL
Java_com_omixlab_lckcontrol_streaming_NativeStreamingEngine_nativeDestroy(
    JNIEnv* env, jobject thiz, jlong ptr) {
    auto* engine = reinterpret_cast<StreamingEngine*>(ptr);
    if (engine) {
        engine->Stop();
        delete engine;
        LOGI("Native engine destroyed");
    }
}
/** @return JNI_TRUE while the engine is actively streaming. */
JNIEXPORT jboolean JNICALL
Java_com_omixlab_lckcontrol_streaming_NativeStreamingEngine_nativeIsRunning(
    JNIEnv* env, jobject thiz, jlong ptr) {
    auto* engine = reinterpret_cast<StreamingEngine*>(ptr);
    if (!engine) return JNI_FALSE;
    return engine->IsRunning() ? JNI_TRUE : JNI_FALSE;
}
} // extern "C"

View File

@@ -0,0 +1,177 @@
#include "rtmp_client.h"
#include <android/log.h>
#include <cstring>
extern "C" {
#include <librtmp/rtmp.h>
#include <librtmp/log.h>
}
#define TAG "LckRtmpClient"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
// No work at construction; the session is created in Connect().
RtmpClient::RtmpClient() = default;

// Tear the librtmp session down if the owner forgot to Disconnect().
RtmpClient::~RtmpClient() {
    Disconnect();
}
/**
 * Open an RTMP publishing session to rtmpUrl (blocking).
 * librtmp call order matters: Alloc → Init → SetupURL → EnableWrite (must
 * precede Connect so the handshake advertises publish mode) → Connect →
 * ConnectStream. On any failure the context is freed and false is returned.
 */
bool RtmpClient::Connect(const std::string& rtmpUrl) {
    if (connected) {
        LOGW("Already connected, disconnecting first");
        Disconnect();
    }
    // Keep librtmp's own logging down to warnings and above.
    RTMP_LogSetLevel(RTMP_LOGWARNING);
    rtmpContext = RTMP_Alloc();
    if (!rtmpContext) {
        LOGE("RTMP_Alloc failed");
        return false;
    }
    RTMP_Init(rtmpContext);
    // RTMP_SetupURL needs a mutable char*
    std::vector<char> urlBuffer(rtmpUrl.begin(), rtmpUrl.end());
    urlBuffer.push_back('\0');
    if (!RTMP_SetupURL(rtmpContext, urlBuffer.data())) {
        LOGE("RTMP_SetupURL failed");
        RTMP_Free(rtmpContext);
        rtmpContext = nullptr;
        return false;
    }
    // Publish (write) mode — must be set before RTMP_Connect.
    RTMP_EnableWrite(rtmpContext);
    if (!RTMP_Connect(rtmpContext, nullptr)) {
        LOGE("RTMP_Connect failed");
        RTMP_Free(rtmpContext);
        rtmpContext = nullptr;
        return false;
    }
    if (!RTMP_ConnectStream(rtmpContext, 0)) {
        LOGE("RTMP_ConnectStream failed");
        RTMP_Close(rtmpContext);
        RTMP_Free(rtmpContext);
        rtmpContext = nullptr;
        return false;
    }
    connected = true;
    LOGI("RTMP connected");
    return true;
}
// Close and free the librtmp session. Idempotent: calling it again (or
// without a prior Connect) is a no-op apart from clearing the flag.
void RtmpClient::Disconnect() {
    if (rtmpContext != nullptr) {
        RTMP_Close(rtmpContext);
        RTMP_Free(rtmpContext);
        rtmpContext = nullptr;
        LOGI("RTMP disconnected");
    }
    connected = false;
}

// True only while our flag, the context, and librtmp's own socket state all
// agree that the session is alive.
bool RtmpClient::IsConnected() const {
    if (!connected || rtmpContext == nullptr) {
        return false;
    }
    return RTMP_IsConnected(rtmpContext) != 0;
}
/**
 * Wrap an FLV tag body in an RTMPPacket and send it synchronously.
 * @param packetType RTMP_PACKET_TYPE_VIDEO or RTMP_PACKET_TYPE_AUDIO.
 * @param timestampMs absolute stream timestamp in milliseconds.
 * On send failure, marks the client disconnected so subsequent IsConnected()
 * checks fail fast.
 */
bool RtmpClient::SendRtmpPacket(uint8_t packetType, uint32_t timestampMs, const uint8_t* data, uint32_t size) {
    if (!IsConnected())
        return false;
    RTMPPacket pkt;
    RTMPPacket_Alloc(&pkt, size);
    pkt.m_packetType = packetType;
    // Separate chunk-stream channels for video (6) and audio (7).
    pkt.m_nChannel = (packetType == RTMP_PACKET_TYPE_VIDEO) ? 0x06 : 0x07;
    pkt.m_headerType = RTMP_PACKET_SIZE_LARGE;
    pkt.m_nTimeStamp = timestampMs;
    pkt.m_hasAbsTimestamp = 1;
    // Tag with the stream id negotiated by RTMP_ConnectStream.
    pkt.m_nInfoField2 = rtmpContext->m_stream_id;
    pkt.m_nBodySize = size;
    memcpy(pkt.m_body, data, size);
    // queue=0: send on this (encoder) thread rather than librtmp's queue.
    int ret = RTMP_SendPacket(rtmpContext, &pkt, 0);
    RTMPPacket_Free(&pkt);
    if (!ret) {
        LOGW("RTMP_SendPacket failed (type=%d, size=%u)", packetType, size);
        connected = false;
    }
    return ret != 0;
}
bool RtmpClient::SendAvcSequenceHeader(const uint8_t* extraData, uint32_t extraDataSize) {
    // FLV video tag: keyframe(1) + AVC(7) = 0x17, AVC sequence header = 0x00, composition time = 0
    // followed by the AVCDecoderConfigurationRecord itself.
    std::vector<uint8_t> body;
    body.reserve(5 + extraDataSize);
    body.push_back(0x17); // keyframe + AVC
    body.push_back(0x00); // AVC sequence header
    body.push_back(0x00); // 24-bit composition time = 0
    body.push_back(0x00);
    body.push_back(0x00);
    body.insert(body.end(), extraData, extraData + extraDataSize);
    return SendRtmpPacket(RTMP_PACKET_TYPE_VIDEO, 0, body.data(),
                          static_cast<uint32_t>(body.size()));
}
void RtmpClient::BuildAudioSpecificConfig(uint8_t outConfig[2], uint32_t sampleRate, uint32_t numChannels) {
    // ISO/IEC 14496-3 AudioSpecificConfig (2 bytes):
    //   5 bits audioObjectType (2 = AAC-LC)
    //   4 bits samplingFrequencyIndex
    //   4 bits channelConfiguration
    //   3 bits frameLength/dependsOnCoreCoder/extensionFlag (all zero)
    static const uint32_t kRates[] = {
        96000, 88200, 64000, 48000, 44100, 32000,
        24000, 22050, 16000, 12000, 11025, 8000, 7350
    };
    uint8_t freqIndex = 4; // default 44100
    for (uint8_t i = 0; i < 13; ++i) {
        if (kRates[i] == sampleRate) {
            freqIndex = i;
            break;
        }
    }
    // Clamp channels into the valid 1..7 channelConfiguration range.
    uint32_t clampedChannels = numChannels;
    if (clampedChannels < 1) clampedChannels = 1;
    if (clampedChannels > 7) clampedChannels = 7;
    const uint8_t channelConfig = static_cast<uint8_t>(clampedChannels);
    // Pack: AAAAA FFFF CCCC 000
    outConfig[0] = static_cast<uint8_t>((2 << 3) | (freqIndex >> 1));
    outConfig[1] = static_cast<uint8_t>(((freqIndex & 1) << 7) | (channelConfig << 3));
}
bool RtmpClient::SendAacSequenceHeader(uint32_t sampleRate, uint32_t numChannels) {
    // FLV audio tag: AAC(10) + 44100(3) + 16-bit(1) + stereo(1) = 0xAF, AAC sequence header = 0x00
    // — for AAC the rate/size/type bits are fixed by the FLV spec; the real
    // parameters come from the AudioSpecificConfig that follows.
    uint8_t body[4] = { 0xAF, 0x00, 0x00, 0x00 };
    BuildAudioSpecificConfig(&body[2], sampleRate, numChannels);
    return SendRtmpPacket(RTMP_PACKET_TYPE_AUDIO, 0, body, sizeof(body));
}
// Send one AVCC-framed access unit as an FLV video tag.
bool RtmpClient::SendVideoPacket(const uint8_t* data, uint32_t size, uint32_t timestampMs, bool isKeyframe) {
    // FrameType(1=key/2=inter) | CodecID 7 (AVC), AVCPacketType 1 (NALU),
    // zero 24-bit composition-time offset.
    std::vector<uint8_t> body;
    body.reserve(5 + size);
    body.push_back(isKeyframe ? 0x17 : 0x27);
    body.push_back(0x01); // AVC NALU
    body.push_back(0x00); // composition time offset
    body.push_back(0x00);
    body.push_back(0x00);
    body.insert(body.end(), data, data + size);
    return SendRtmpPacket(RTMP_PACKET_TYPE_VIDEO, timestampMs, body.data(),
                          static_cast<uint32_t>(body.size()));
}

// Send one raw AAC frame as an FLV audio tag.
bool RtmpClient::SendAudioPacket(const uint8_t* data, uint32_t size, uint32_t timestampMs) {
    std::vector<uint8_t> body;
    body.reserve(2 + size);
    body.push_back(0xAF);
    body.push_back(0x01); // AAC raw
    body.insert(body.end(), data, data + size);
    return SendRtmpPacket(RTMP_PACKET_TYPE_AUDIO, timestampMs, body.data(),
                          static_cast<uint32_t>(body.size()));
}

View File

@@ -0,0 +1,34 @@
#pragma once
#include <cstdint>
#include <string>
#include <vector>
// Forward declaration keeps librtmp headers out of this public header.
struct RTMP;
/**
 * Low-level librtmp wrapper for RTMP streaming.
 * Ported from FLCKRtmpClient (UE5 LCKStreaming plugin).
 * All methods should be called from the same thread (encoder thread).
 */
class RtmpClient {
public:
    RtmpClient();
    ~RtmpClient();
    /** Blocking connect + publish-stream setup. Returns success. */
    bool Connect(const std::string& rtmpUrl);
    /** Close and free the session; safe to call repeatedly. */
    void Disconnect();
    /** True while the underlying socket is still alive. */
    bool IsConnected() const;
    /** Send the AVCDecoderConfigurationRecord (AVCC csd) at timestamp 0. */
    bool SendAvcSequenceHeader(const uint8_t* extraData, uint32_t extraDataSize);
    /** Send the 2-byte AAC AudioSpecificConfig at timestamp 0. */
    bool SendAacSequenceHeader(uint32_t sampleRate, uint32_t numChannels);
    /** Send one AVCC-framed access unit. Timestamps are absolute ms. */
    bool SendVideoPacket(const uint8_t* data, uint32_t size, uint32_t timestampMs, bool isKeyframe);
    /** Send one raw AAC frame. Timestamps are absolute ms. */
    bool SendAudioPacket(const uint8_t* data, uint32_t size, uint32_t timestampMs);
private:
    // Shared framing/send path; flips `connected` off on send failure.
    bool SendRtmpPacket(uint8_t packetType, uint32_t timestampMs, const uint8_t* data, uint32_t size);
    static void BuildAudioSpecificConfig(uint8_t outConfig[2], uint32_t sampleRate, uint32_t numChannels);
    RTMP* rtmpContext = nullptr;  // owned; freed in Disconnect()
    bool connected = false;
};

View File

@@ -0,0 +1,278 @@
#include "rtmp_sink.h"
#include <android/log.h>
#include <cstring>
#include <algorithm>
#define TAG "LckRtmpSink"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
// Nothing to do at construction; the connection is made in Open().
RtmpSink::RtmpSink() = default;

// Close the RTMP connection if the owner forgot to call Close().
RtmpSink::~RtmpSink() {
    if (isOpen) {
        Close();
    }
}

// Remember the destination URL for the upcoming Open() call.
void RtmpSink::SetRtmpUrl(const std::string& url) {
    rtmpUrl = url;
}
/**
 * Connect to the configured RTMP URL and reset sequence-header state.
 * The video parameters are informational (logged); the audio parameters are
 * stashed so a deferred AAC sequence header can be emitted later even if
 * OnAudioFormatReady() never fires.
 */
bool RtmpSink::Open(uint32_t width, uint32_t height, uint32_t framerate,
                    uint32_t sampleRate, uint32_t numChannels) {
    if (rtmpUrl.empty()) {
        LOGE("RTMP URL not set");
        return false;
    }
    storedSampleRate = sampleRate;
    storedNumChannels = numChannels;
    if (!rtmpClient.Connect(rtmpUrl)) {
        LOGE("Failed to connect RTMP");
        return false;
    }
    videoHeaderSent = false;
    audioHeaderSent = false;
    isOpen = true;
    LOGI("RTMP sink opened: %dx%d@%dfps, %dHz %dch",
         width, height, framerate, sampleRate, numChannels);
    return true;
}
// Encoder reported its video codec-specific data (csd). AVCC csd starts with
// configurationVersion == 0x01 and can be forwarded verbatim; anything else
// is treated as Annex-B and converted.
void RtmpSink::OnVideoFormatReady(const uint8_t* extraData, uint32_t extraDataSize) {
    if (!isOpen || extraData == nullptr || extraDataSize == 0) {
        return;
    }
    const bool looksLikeAvcc = extraDataSize > 4 && extraData[0] == 0x01;
    if (!looksLikeAvcc) {
        // Annex-B format - extract and convert
        TryExtractAndSendSequenceHeader(extraData, extraDataSize);
        return;
    }
    if (rtmpClient.SendAvcSequenceHeader(extraData, extraDataSize)) {
        videoHeaderSent = true;
        LOGI("Sent AVC sequence header (AVCC, %u bytes)", extraDataSize);
    }
}

// Encoder reported the final audio format; send the AAC sequence header now
// and remember the parameters for any later deferred resend.
void RtmpSink::OnAudioFormatReady(uint32_t sampleRate, uint32_t numChannels) {
    if (!isOpen) {
        return;
    }
    storedSampleRate = sampleRate;
    storedNumChannels = numChannels;
    if (rtmpClient.SendAacSequenceHeader(sampleRate, numChannels)) {
        audioHeaderSent = true;
        LOGI("Sent AAC sequence header (%dHz, %dch)", sampleRate, numChannels);
    }
}
/**
 * Forward one encoded video access unit (Annex-B) to RTMP.
 * Ordering rules enforced here: the AVC sequence header must precede any
 * video data (extracted from the first keyframe if OnVideoFormatReady never
 * supplied csd), and the AAC header is sent lazily before media flows.
 */
void RtmpSink::SendVideoPacket(const uint8_t* data, uint32_t size,
                               int64_t timestampMs, bool isKeyframe) {
    if (!isOpen || !rtmpClient.IsConnected()) return;
    // If we haven't sent the video sequence header yet and this is a keyframe,
    // try to extract SPS/PPS from it
    if (!videoHeaderSent && isKeyframe) {
        TryExtractAndSendSequenceHeader(data, size);
    }
    // Drop frames until the decoder config has gone out — players cannot
    // decode anything sent before it.
    if (!videoHeaderSent) return;
    // Send AAC sequence header on first video packet if not sent yet
    if (!audioHeaderSent) {
        if (rtmpClient.SendAacSequenceHeader(storedSampleRate, storedNumChannels)) {
            audioHeaderSent = true;
            LOGI("Sent AAC sequence header (deferred, %dHz, %dch)",
                 storedSampleRate, storedNumChannels);
        }
    }
    // FLV timestamps are unsigned 32-bit ms; clamp negatives to 0.
    uint32_t ts = static_cast<uint32_t>(std::max<int64_t>(timestampMs, 0));
    // Convert Annex-B to AVCC for RTMP/FLV
    std::vector<uint8_t> avccData = ConvertAnnexBToAvcc(data, size);
    if (!avccData.empty()) {
        rtmpClient.SendVideoPacket(avccData.data(), static_cast<uint32_t>(avccData.size()),
                                   ts, isKeyframe);
    }
}
/**
 * Forward one raw AAC frame. Audio is gated on BOTH sequence headers having
 * been sent so the stream always starts with a decodable video config.
 */
void RtmpSink::SendAudioPacket(const uint8_t* data, uint32_t size, int64_t timestampMs) {
    if (!isOpen || !rtmpClient.IsConnected()) return;
    if (!audioHeaderSent || !videoHeaderSent) return;
    uint32_t ts = static_cast<uint32_t>(std::max<int64_t>(timestampMs, 0));
    rtmpClient.SendAudioPacket(data, size, ts);
}
// Disconnect and reset sequence-header state. Safe to call when already
// closed (no-op).
void RtmpSink::Close() {
    if (!isOpen) {
        return;
    }
    rtmpClient.Disconnect();
    videoHeaderSent = false;
    audioHeaderSent = false;
    isOpen = false;
    LOGI("RTMP sink closed");
}

bool RtmpSink::IsOpen() const {
    return isOpen;
}
/**
 * Scan an Annex-B buffer (csd or keyframe) for SPS (NALU type 7) and PPS
 * (type 8), build an AVCDecoderConfigurationRecord from the first of each,
 * and send it as the FLV AVC sequence header. Sets videoHeaderSent and
 * returns true on success.
 */
bool RtmpSink::TryExtractAndSendSequenceHeader(const uint8_t* data, uint32_t size) {
    // Parse Annex-B bitstream to find SPS and PPS NALUs
    const uint8_t* sps = nullptr;
    uint32_t spsSize = 0;
    const uint8_t* pps = nullptr;
    uint32_t ppsSize = 0;
    const uint8_t* end = data + size;
    // Returns a pointer just PAST the next 00 00 01 / 00 00 00 01 start code
    // (i.e. at the first NALU byte), or nullptr when none remains.
    auto findStartCode = [](const uint8_t* p, const uint8_t* end) -> const uint8_t* {
        while (p + 3 <= end) {
            if (p[0] == 0 && p[1] == 0) {
                if (p[2] == 1) return p + 3;
                if (p + 3 < end && p[2] == 0 && p[3] == 1) return p + 4;
            }
            p++;
        }
        return nullptr;
    };
    const uint8_t* pos = findStartCode(data, end);
    while (pos && pos < end) {
        uint8_t currentNaluType = pos[0] & 0x1F;  // low 5 bits of NALU header
        const uint8_t* currentNaluStart = pos;
        const uint8_t* nextStart = findStartCode(pos, end);
        const uint8_t* naluEnd;
        if (nextStart) {
            // nextStart points past the next start code: back up over the
            // 3-byte code, plus one more zero when it was the 4-byte form.
            naluEnd = nextStart - 3;
            if (naluEnd > data && *(naluEnd - 1) == 0) naluEnd--;
        } else {
            naluEnd = end;
        }
        uint32_t naluSize = static_cast<uint32_t>(naluEnd - currentNaluStart);
        if (currentNaluType == 7 && !sps) { // SPS
            sps = currentNaluStart;
            spsSize = naluSize;
        } else if (currentNaluType == 8 && !pps) { // PPS
            pps = currentNaluStart;
            ppsSize = naluSize;
        }
        if (sps && pps) break;
        pos = nextStart;
    }
    if (sps && spsSize > 0 && pps && ppsSize > 0) {
        std::vector<uint8_t> avcc = BuildAvccFromAnnexB(sps, spsSize, pps, ppsSize);
        if (rtmpClient.SendAvcSequenceHeader(avcc.data(), static_cast<uint32_t>(avcc.size()))) {
            videoHeaderSent = true;
            LOGI("Sent AVC sequence header (extracted SPS=%u PPS=%u)", spsSize, ppsSize);
            return true;
        } else {
            LOGE("SendAvcSequenceHeader failed (SPS=%u PPS=%u)", spsSize, ppsSize);
        }
    }
    return false;
}
/**
 * Re-frame an Annex-B access unit as AVCC (each NALU prefixed by a 4-byte
 * big-endian length) for FLV/RTMP. SPS/PPS/AUD NALUs are stripped because
 * that information travels in the sequence header instead. Input without any
 * start code is assumed to already be AVCC and is passed through unchanged.
 */
std::vector<uint8_t> RtmpSink::ConvertAnnexBToAvcc(const uint8_t* data, uint32_t size) {
    std::vector<uint8_t> result;
    result.reserve(size);
    // Returns a pointer AT the next start code (not past it) and reports its
    // length; 4-byte codes are checked first so 00 00 00 01 is not split.
    auto findStartCode = [](const uint8_t* p, const uint8_t* end, int& startCodeLen) -> const uint8_t* {
        while (p + 3 <= end) {
            if (p[0] == 0 && p[1] == 0) {
                if (p + 3 < end && p[2] == 0 && p[3] == 1) {
                    startCodeLen = 4;
                    return p;
                }
                if (p[2] == 1) {
                    startCodeLen = 3;
                    return p;
                }
            }
            p++;
        }
        return nullptr;
    };
    const uint8_t* pos = data;
    const uint8_t* end = data + size;
    int startCodeLen = 0;
    const uint8_t* startCode = findStartCode(pos, end, startCodeLen);
    if (!startCode) {
        // No start codes found - pass through
        result.insert(result.end(), data, data + size);
        return result;
    }
    while (startCode) {
        const uint8_t* naluStart = startCode + startCodeLen;
        if (naluStart >= end) break;
        int nextStartCodeLen = 0;
        const uint8_t* nextStartCode = findStartCode(naluStart, end, nextStartCodeLen);
        // NALU runs up to the next start code (or end of buffer).
        uint32_t naluSize = nextStartCode
            ? static_cast<uint32_t>(nextStartCode - naluStart)
            : static_cast<uint32_t>(end - naluStart);
        if (naluSize > 0) {
            uint8_t naluType = naluStart[0] & 0x1F;
            // Skip SPS (7), PPS (8), AUD (9)
            if (naluType != 7 && naluType != 8 && naluType != 9) {
                // 4-byte big-endian length prefix, then the NALU payload.
                result.push_back(static_cast<uint8_t>(naluSize >> 24));
                result.push_back(static_cast<uint8_t>(naluSize >> 16));
                result.push_back(static_cast<uint8_t>(naluSize >> 8));
                result.push_back(static_cast<uint8_t>(naluSize & 0xFF));
                result.insert(result.end(), naluStart, naluStart + naluSize);
            }
        }
        startCode = nextStartCode;
        startCodeLen = nextStartCodeLen;
    }
    return result;
}
/**
 * Assemble an AVCDecoderConfigurationRecord (ISO/IEC 14496-15) from exactly
 * one SPS and one PPS NALU (without start codes). Profile/level bytes are
 * copied from the SPS when present, else fall back to Baseline level 3.0.
 */
std::vector<uint8_t> RtmpSink::BuildAvccFromAnnexB(const uint8_t* sps, uint32_t spsSize,
                                                   const uint8_t* pps, uint32_t ppsSize) {
    // AVCDecoderConfigurationRecord
    std::vector<uint8_t> record;
    record.reserve(11 + spsSize + ppsSize);
    record.push_back(0x01); // configurationVersion
    record.push_back(spsSize > 1 ? sps[1] : 0x42); // AVCProfileIndication
    record.push_back(spsSize > 2 ? sps[2] : 0x00); // profile_compatibility
    record.push_back(spsSize > 3 ? sps[3] : 0x1E); // AVCLevelIndication
    record.push_back(0xFF); // lengthSizeMinusOne = 3 (4 bytes)
    record.push_back(0xE1); // numOfSequenceParameterSets = 1
    // SPS length (big-endian)
    record.push_back(static_cast<uint8_t>(spsSize >> 8));
    record.push_back(static_cast<uint8_t>(spsSize & 0xFF));
    record.insert(record.end(), sps, sps + spsSize);
    record.push_back(0x01); // numOfPictureParameterSets = 1
    // PPS length (big-endian)
    record.push_back(static_cast<uint8_t>(ppsSize >> 8));
    record.push_back(static_cast<uint8_t>(ppsSize & 0xFF));
    record.insert(record.end(), pps, pps + ppsSize);
    return record;
}

View File

@@ -0,0 +1,43 @@
#pragma once
#include "rtmp_client.h"
#include <cstdint>
#include <string>
#include <vector>
/**
 * RTMP sink that bridges encoded packets to an RTMP endpoint.
 * Ported from FLCKRtmpSink (UE5 LCKStreaming plugin).
 * Handles Annex-B to AVCC conversion, sequence headers, and FLV framing.
 * Call sequence: SetRtmpUrl → Open → (format callbacks + Send*Packet) → Close.
 */
class RtmpSink {
public:
    RtmpSink();
    ~RtmpSink();
    /** Set the destination before Open(); no connection is made here. */
    void SetRtmpUrl(const std::string& url);
    /** Connect to the URL; audio params are stashed for deferred headers. */
    bool Open(uint32_t width, uint32_t height, uint32_t framerate,
              uint32_t sampleRate, uint32_t numChannels);
    /** Video csd arrived (AVCC or Annex-B); sends the AVC sequence header. */
    void OnVideoFormatReady(const uint8_t* extraData, uint32_t extraDataSize);
    /** Audio format arrived; sends the AAC sequence header. */
    void OnAudioFormatReady(uint32_t sampleRate, uint32_t numChannels);
    /** Forward one Annex-B access unit (converted to AVCC internally). */
    void SendVideoPacket(const uint8_t* data, uint32_t size,
                         int64_t timestampMs, bool isKeyframe);
    /** Forward one raw AAC frame; dropped until both headers are sent. */
    void SendAudioPacket(const uint8_t* data, uint32_t size, int64_t timestampMs);
    void Close();
    bool IsOpen() const;
private:
    bool TryExtractAndSendSequenceHeader(const uint8_t* data, uint32_t size);
    static std::vector<uint8_t> ConvertAnnexBToAvcc(const uint8_t* data, uint32_t size);
    static std::vector<uint8_t> BuildAvccFromAnnexB(const uint8_t* sps, uint32_t spsSize,
                                                    const uint8_t* pps, uint32_t ppsSize);
    RtmpClient rtmpClient;
    std::string rtmpUrl;
    bool isOpen = false;
    // Sequence-header bookkeeping: media is gated until these are true.
    bool videoHeaderSent = false;
    bool audioHeaderSent = false;
    // Audio parameters remembered from Open()/OnAudioFormatReady() so the AAC
    // header can be (re)sent lazily.
    uint32_t storedSampleRate = 48000;
    uint32_t storedNumChannels = 2;
};

View File

@@ -0,0 +1,587 @@
#include "streaming_engine.h"
#include <android/log.h>
#include <GLES3/gl3.h>
#include <GLES2/gl2ext.h>
#include <unistd.h>
#include <cstring>
#include <algorithm>
#define TAG "LckStreamingEngine"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
// Shader source for blitting OES texture to framebuffer.
// Pass-through vertex stage: positions and UVs come from the full-screen
// quad VBO built in InitBlitResources().
static const char* BLIT_VERTEX_SHADER = R"(#version 300 es
layout(location = 0) in vec2 aPos;
layout(location = 1) in vec2 aTexCoord;
out vec2 vTexCoord;
void main() {
    gl_Position = vec4(aPos, 0.0, 1.0);
    vTexCoord = aTexCoord;
}
)";
// Fragment stage samples the imported AHardwareBuffer via samplerExternalOES;
// GL_OES_EGL_image_external_essl3 is required for external textures in ESSL3.
static const char* BLIT_FRAGMENT_SHADER = R"(#version 300 es
#extension GL_OES_EGL_image_external_essl3 : require
precision mediump float;
in vec2 vTexCoord;
out vec4 fragColor;
uniform samplerExternalOES uTexture;
void main() {
    fragColor = texture(uTexture, vTexCoord);
}
)";
// Compiles a single shader stage from source.
// Returns the shader handle, or 0 on compile failure (error is logged).
static GLuint CompileShader(GLenum type, const char* source) {
    const GLuint handle = glCreateShader(type);
    glShaderSource(handle, 1, &source, nullptr);
    glCompileShader(handle);
    GLint compiled = 0;
    glGetShaderiv(handle, GL_COMPILE_STATUS, &compiled);
    if (compiled) {
        return handle;
    }
    char infoLog[512];
    glGetShaderInfoLog(handle, sizeof(infoLog), nullptr, infoLog);
    LOGE("Shader compile error: %s", infoLog);
    glDeleteShader(handle);
    return 0;
}
StreamingEngine::StreamingEngine() {}
// Stops the encoder thread (no-op if not running) and frees all RTMP sinks.
// Sinks are owned raw pointers created in AddDestination(); when the encoder
// thread ran, its shutdown path has already Close()d them.
StreamingEngine::~StreamingEngine() {
    Stop();
    for (auto* sink : sinks) {
        delete sink;
    }
    sinks.clear();
}
bool StreamingEngine::Configure(int w, int h, int vBitrate, int aBitrate,
int sr, int ch, int kfi) {
width = w;
height = h;
videoBitrate = vBitrate;
audioBitrate = aBitrate;
sampleRate = sr;
channels = ch;
keyframeInterval = kfi;
return true;
}
// Registers an RTMP destination and returns its index in the sink list.
// Sinks are raw owned pointers, freed in the destructor.
// NOTE(review): `sinks` is iterated without a lock by the encoder thread —
// this appears intended to be called only before Start(); confirm callers.
int StreamingEngine::AddDestination(const std::string& rtmpUrl) {
    auto* sink = new RtmpSink();
    sink->SetRtmpUrl(rtmpUrl);
    sinks.push_back(sink);
    return static_cast<int>(sinks.size() - 1);
}
/**
 * Creates, configures, and starts the H.264 hardware encoder, and obtains
 * its input surface (`encoderSurface`) for the EGL blit path.
 *
 * Runs on the encoder thread. On any failure every resource created so far
 * is released and false is returned.
 */
bool StreamingEngine::InitVideoEncoder() {
    videoEncoder = AMediaCodec_createEncoderByType("video/avc");
    if (!videoEncoder) {
        LOGE("Failed to create video encoder");
        return false;
    }
    AMediaFormat* format = AMediaFormat_new();
    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, "video/avc");
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, width);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HEIGHT, height);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, videoBitrate);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_FRAME_RATE, framerate);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, keyframeInterval);
    // Numeric constants mirror the Java MediaCodecInfo values, which have no
    // NDK named equivalents.
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT, 0x7F000789); // COLOR_FormatSurface
    AMediaFormat_setInt32(format, "profile", 8); // AVCProfileHigh
    AMediaFormat_setInt32(format, "level", 2048); // AVCLevel42
    AMediaFormat_setInt32(format, "bitrate-mode", 2); // CBR
    media_status_t status = AMediaCodec_configure(videoEncoder, format, nullptr, nullptr,
                                                  AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
    AMediaFormat_delete(format);
    if (status != AMEDIA_OK) {
        LOGE("Video encoder configure failed: %d", status);
        AMediaCodec_delete(videoEncoder);
        videoEncoder = nullptr;
        return false;
    }
    // The input surface must be created after configure() and before start().
    status = AMediaCodec_createInputSurface(videoEncoder, &encoderSurface);
    if (status != AMEDIA_OK || !encoderSurface) {
        LOGE("Failed to create encoder input surface: %d", status);
        AMediaCodec_delete(videoEncoder);
        videoEncoder = nullptr;
        return false;
    }
    status = AMediaCodec_start(videoEncoder);
    if (status != AMEDIA_OK) {
        LOGE("Video encoder start failed: %d", status);
        ANativeWindow_release(encoderSurface);
        encoderSurface = nullptr;
        AMediaCodec_delete(videoEncoder);
        videoEncoder = nullptr;
        return false;
    }
    LOGI("Video encoder started: %dx%d @ %d bps", width, height, videoBitrate);
    return true;
}
/**
 * Creates, configures, and starts the AAC-LC audio encoder.
 *
 * Runs on the encoder thread. Failure is non-fatal to the pipeline: the
 * caller (EncoderThreadFunc) continues video-only when this returns false.
 */
bool StreamingEngine::InitAudioEncoder() {
    audioEncoder = AMediaCodec_createEncoderByType("audio/mp4a-latm");
    if (!audioEncoder) {
        LOGE("Failed to create audio encoder");
        return false;
    }
    AMediaFormat* format = AMediaFormat_new();
    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, "audio/mp4a-latm");
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_AAC_PROFILE, 2); // AAC-LC
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, audioBitrate);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, sampleRate);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, channels);
    media_status_t status = AMediaCodec_configure(audioEncoder, format, nullptr, nullptr,
                                                  AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
    AMediaFormat_delete(format);
    if (status != AMEDIA_OK) {
        LOGE("Audio encoder configure failed: %d", status);
        AMediaCodec_delete(audioEncoder);
        audioEncoder = nullptr;
        return false;
    }
    status = AMediaCodec_start(audioEncoder);
    if (status != AMEDIA_OK) {
        LOGE("Audio encoder start failed: %d", status);
        AMediaCodec_delete(audioEncoder);
        audioEncoder = nullptr;
        return false;
    }
    LOGI("Audio encoder started: %dHz %dch @ %d bps", sampleRate, channels, audioBitrate);
    return true;
}
/**
 * Builds the GL program and full-screen-quad geometry used to blit the
 * imported external texture into the encoder's EGL surface.
 *
 * Must run on the encoder thread with the EGL context current.
 * Shaders are deleted right after attach; GL defers the actual delete until
 * the program releases them (or immediately on link failure, since
 * glDeleteProgram also detaches).
 */
bool StreamingEngine::InitBlitResources() {
    GLuint vs = CompileShader(GL_VERTEX_SHADER, BLIT_VERTEX_SHADER);
    GLuint fs = CompileShader(GL_FRAGMENT_SHADER, BLIT_FRAGMENT_SHADER);
    if (!vs || !fs) return false;
    blitProgram = glCreateProgram();
    glAttachShader(blitProgram, vs);
    glAttachShader(blitProgram, fs);
    glLinkProgram(blitProgram);
    glDeleteShader(vs);
    glDeleteShader(fs);
    GLint linkStatus;
    glGetProgramiv(blitProgram, GL_LINK_STATUS, &linkStatus);
    if (!linkStatus) {
        LOGE("Blit program link failed");
        glDeleteProgram(blitProgram);
        blitProgram = 0;
        return false;
    }
    // Full-screen quad: pos(x,y) + texcoord(u,v), laid out for a
    // 4-vertex GL_TRIANGLE_STRIP draw (see BlitToEncoder).
    float quad[] = {
        -1.0f, -1.0f, 0.0f, 0.0f,
         1.0f, -1.0f, 1.0f, 0.0f,
        -1.0f,  1.0f, 0.0f, 1.0f,
         1.0f,  1.0f, 1.0f, 1.0f,
    };
    glGenVertexArrays(1, &blitVao);
    glGenBuffers(1, &blitVbo);
    glBindVertexArray(blitVao);
    glBindBuffer(GL_ARRAY_BUFFER, blitVbo);
    glBufferData(GL_ARRAY_BUFFER, sizeof(quad), quad, GL_STATIC_DRAW);
    // Attribute 0: vec2 position; attribute 1: vec2 texcoord (stride 16 bytes).
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), (void*)0);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), (void*)(2 * sizeof(float)));
    glEnableVertexAttribArray(1);
    glBindVertexArray(0);
    return true;
}
// Frees the GL objects created by InitBlitResources().
// Idempotent: each handle is zeroed after deletion, so repeated calls no-op.
void StreamingEngine::ReleaseBlitResources() {
    if (blitVao != 0) {
        glDeleteVertexArrays(1, &blitVao);
        blitVao = 0;
    }
    if (blitVbo != 0) {
        glDeleteBuffers(1, &blitVbo);
        blitVbo = 0;
    }
    if (blitProgram != 0) {
        glDeleteProgram(blitProgram);
        blitProgram = 0;
    }
}
/**
 * Launches the encoder thread and begins streaming.
 *
 * @return true if the thread was launched (or streaming is already active);
 *         false if dimensions are invalid or no destination was added.
 *         Initialization errors inside the encoder thread are reported
 *         asynchronously via the error callback, not by this return value.
 */
bool StreamingEngine::Start() {
    if (running.load()) return true;
    if (width <= 0 || height <= 0) {
        LOGE("Invalid dimensions: %dx%d", width, height);
        return false;
    }
    if (sinks.empty()) {
        LOGE("No destinations configured");
        return false;
    }
    // A previous encoder thread may have exited on its own after an init
    // failure (it clears `running` itself), leaving the std::thread object
    // joinable. Assigning over a joinable std::thread calls std::terminate,
    // so reap the stale thread before launching a new one.
    if (encoderThread.joinable()) {
        encoderThread.join();
    }
    running.store(true);
    // Reset the timeline anchor and the per-interval stats counters.
    firstVideoFrame = true;
    startTimestampNs = 0;
    statsVideoBytes = 0;
    statsAudioBytes = 0;
    statsFrameCount = 0;
    statsLastUpdateNs = 0;
    encoderThread = std::thread(&StreamingEngine::EncoderThreadFunc, this);
    return true;
}
/**
 * Encoder thread main loop: owns the whole EGL/codec lifecycle.
 *
 * Init order: EGL context -> video encoder (provides the input surface) ->
 * EGL window surface over that surface -> GL blit resources -> audio
 * encoder (optional) -> RTMP sinks. Teardown runs in reverse at the bottom
 * of this function. On init failure the thread clears `running` itself and
 * reports through errorCallback.
 */
void StreamingEngine::EncoderThreadFunc() {
    LOGI("Encoder thread started");
    // Init EGL
    if (!eglContext.Init()) {
        LOGE("EGL init failed");
        running.store(false);
        if (errorCallback) errorCallback(1, "EGL initialization failed");
        return;
    }
    // Init video encoder (creates input surface)
    if (!InitVideoEncoder()) {
        LOGE("Video encoder init failed");
        eglContext.Release();
        running.store(false);
        if (errorCallback) errorCallback(2, "Video encoder initialization failed");
        return;
    }
    // Create EGL window surface from encoder input surface
    if (!eglContext.CreateWindowSurface(encoderSurface)) {
        LOGE("EGL window surface creation failed");
        AMediaCodec_stop(videoEncoder);
        AMediaCodec_delete(videoEncoder);
        videoEncoder = nullptr;
        ANativeWindow_release(encoderSurface);
        encoderSurface = nullptr;
        eglContext.Release();
        running.store(false);
        if (errorCallback) errorCallback(3, "EGL window surface creation failed");
        return;
    }
    if (!eglContext.MakeCurrent()) {
        LOGE("EGL make current failed");
        // NOTE(review): unlike the failure path above, this one does not
        // stop/delete videoEncoder, release encoderSurface, or release the
        // EGL context — those resources leak. The blit-resources failure
        // below has the same gap. Consider a unified unwind.
        running.store(false);
        if (errorCallback) errorCallback(4, "EGL make current failed");
        return;
    }
    // Init blit resources
    if (!InitBlitResources()) {
        LOGE("Blit resources init failed");
        running.store(false);
        if (errorCallback) errorCallback(5, "Blit resources initialization failed");
        return;
    }
    // Init audio encoder — failure is non-fatal, stream continues video-only.
    if (!InitAudioEncoder()) {
        LOGW("Audio encoder init failed, continuing without audio");
    }
    // Open RTMP sinks. A failed sink is reported but kept in the list and
    // will still be handed packets.
    // NOTE(review): confirm RtmpSink::Send* no-op when not open.
    for (auto* sink : sinks) {
        if (!sink->Open(width, height, framerate, sampleRate, channels)) {
            LOGE("Failed to open RTMP sink");
            if (errorCallback) errorCallback(6, "RTMP connection failed");
        }
    }
    LOGI("Streaming engine fully initialized");
    // Main encoder loop: drain the submit queues, feed/drain the codecs,
    // then sleep 1 ms to avoid spinning (adds up to ~1 ms of latency per
    // cycle).
    while (running.load()) {
        // Process video frames
        {
            std::lock_guard<std::mutex> lock(videoMutex);
            for (auto& frame : videoQueue) {
                ProcessVideoFrame(frame);
            }
            videoQueue.clear();
        }
        // Process audio frames
        {
            std::lock_guard<std::mutex> lock(audioMutex);
            for (auto& frame : audioQueue) {
                ProcessAudioFrame(frame);
            }
            audioQueue.clear();
        }
        // Drain encoders
        DrainVideoEncoder();
        if (audioEncoder) {
            DrainAudioEncoder();
        }
        // Don't spin-wait
        std::this_thread::sleep_for(std::chrono::milliseconds(1));
    }
    // Cleanup — reverse of init order.
    LOGI("Encoder thread shutting down");
    ReleaseBlitResources();
    for (auto* sink : sinks) {
        sink->Close();
    }
    if (videoEncoder) {
        AMediaCodec_stop(videoEncoder);
        AMediaCodec_delete(videoEncoder);
        videoEncoder = nullptr;
    }
    if (encoderSurface) {
        ANativeWindow_release(encoderSurface);
        encoderSurface = nullptr;
    }
    if (audioEncoder) {
        AMediaCodec_stop(audioEncoder);
        AMediaCodec_delete(audioEncoder);
        audioEncoder = nullptr;
    }
    eglContext.Release();
    LOGI("Encoder thread stopped");
}
/**
 * Encoder-thread handler for one queued frame: waits on the GPU fence,
 * imports the AHardwareBuffer as an external OES texture, blits it into
 * the encoder's input surface, and deletes the texture.
 *
 * NOTE(review): the engine never calls AHardwareBuffer_acquire/_release and
 * bufferReleasedCallback is never invoked in this file — confirm the
 * producer's buffer-lifetime contract.
 */
void StreamingEngine::ProcessVideoFrame(const VideoFrame& frame) {
    if (!frame.buffer) return;
    // The first video frame anchors the stream timeline; audio timestamps
    // are made relative to it (see ProcessAudioFrame).
    if (firstVideoFrame) {
        startTimestampNs = frame.timestampNs;
        firstVideoFrame = false;
    }
    // Wait on GPU fence
    eglContext.WaitFence(frame.fenceFd);
    // Import HardwareBuffer as GL texture
    GLuint texture = eglContext.ImportHardwareBuffer(frame.buffer);
    if (texture == 0) {
        LOGW("Failed to import HardwareBuffer as texture");
        return;
    }
    // Blit to encoder surface
    BlitToEncoder(texture, frame.timestampNs);
    // Clean up texture
    glDeleteTextures(1, &texture);
}
// Draws the imported external texture as a full-screen quad into the
// encoder's EGL surface, stamping the frame with the given presentation
// time before the buffer swap hands it to the codec.
void StreamingEngine::BlitToEncoder(GLuint srcTexture, int64_t timestampNs) {
    glViewport(0, 0, width, height);
    glUseProgram(blitProgram);
    const GLint samplerLoc = glGetUniformLocation(blitProgram, "uTexture");
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_EXTERNAL_OES, srcTexture);
    glUniform1i(samplerLoc, 0);
    glBindVertexArray(blitVao);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    glBindVertexArray(0);
    eglContext.SetPresentationTime(timestampNs);
    eglContext.SwapBuffers();
}
/**
 * Feeds one PCM chunk into the AAC encoder (encoder thread only).
 *
 * Fixes over the previous version: a dequeued input slot is no longer
 * stranded when getInputBuffer returns null (it is queued back empty so the
 * codec does not leak a buffer slot), and silent truncation of oversized
 * PCM chunks is now logged.
 *
 * NOTE(review): if audio is submitted before the first video frame,
 * startTimestampNs is still 0 and the timestamp sent here is absolute —
 * confirm callers only submit audio once video is flowing.
 */
void StreamingEngine::ProcessAudioFrame(const AudioFrame& frame) {
    if (!audioEncoder || frame.pcmData.empty()) return;
    ssize_t inputIndex = AMediaCodec_dequeueInputBuffer(audioEncoder, 0);
    if (inputIndex < 0) {
        // No free input slot: this PCM chunk is dropped.
        LOGW("No audio input buffer available");
        return;
    }
    size_t bufferSize;
    uint8_t* inputBuffer = AMediaCodec_getInputBuffer(audioEncoder, inputIndex, &bufferSize);
    if (!inputBuffer) {
        // Return the dequeued slot to the codec so it is not stranded.
        AMediaCodec_queueInputBuffer(audioEncoder, inputIndex, 0, 0, 0, 0);
        return;
    }
    size_t copySize = std::min(frame.pcmData.size(), bufferSize);
    if (copySize < frame.pcmData.size()) {
        LOGW("Audio frame truncated: %zu bytes into %zu-byte input buffer",
             frame.pcmData.size(), bufferSize);
    }
    memcpy(inputBuffer, frame.pcmData.data(), copySize);
    // Timestamps are relative to the first video frame, converted ns -> us.
    int64_t relativeTs = frame.timestampNs - startTimestampNs;
    AMediaCodec_queueInputBuffer(audioEncoder, inputIndex, 0, copySize,
                                 relativeTs / 1000, 0);
}
/**
 * Drains all pending output buffers from the video encoder and forwards
 * them to every RTMP sink (encoder thread only).
 *
 * Codec-config buffers (SPS/PPS) are routed to OnVideoFormatReady() ONLY.
 * Previously the `info.size > 0` branch did not exclude the CODEC_CONFIG
 * flag (config buffers carry size > 0), so the SPS/PPS payload was also
 * sent as a regular video packet — and counted in the bitrate/fps stats —
 * corrupting the stream start for strict RTMP servers. The audio drain
 * already excluded the flag; this now matches it.
 */
void StreamingEngine::DrainVideoEncoder() {
    if (!videoEncoder) return;
    AMediaCodecBufferInfo info;
    ssize_t outputIndex;
    while ((outputIndex = AMediaCodec_dequeueOutputBuffer(videoEncoder, &info, 0)) >= 0) {
        if (info.flags & AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG) {
            // Sequence header (SPS/PPS) — forward as format data only.
            size_t outSize;
            uint8_t* configData = AMediaCodec_getOutputBuffer(videoEncoder, outputIndex, &outSize);
            if (configData) {
                for (auto* sink : sinks) {
                    sink->OnVideoFormatReady(configData + info.offset, info.size);
                }
            }
        } else if (info.size > 0) {
            size_t outSize;
            uint8_t* outputData = AMediaCodec_getOutputBuffer(videoEncoder, outputIndex, &outSize);
            if (outputData) {
                bool isKeyframe = (info.flags & AMEDIACODEC_BUFFER_FLAG_KEY_FRAME) != 0;
                int64_t timestampMs = info.presentationTimeUs / 1000;
                for (auto* sink : sinks) {
                    sink->SendVideoPacket(outputData + info.offset, info.size,
                                          timestampMs, isKeyframe);
                }
                std::lock_guard<std::mutex> lock(statsMutex);
                statsVideoBytes += info.size;
                statsFrameCount++;
            }
        }
        AMediaCodec_releaseOutputBuffer(videoEncoder, outputIndex, false);
        UpdateStats();
    }
    if (outputIndex == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
        AMediaFormat* newFormat = AMediaCodec_getOutputFormat(videoEncoder);
        if (newFormat) {
            LOGI("Video encoder output format changed");
            AMediaFormat_delete(newFormat);
        }
    }
}
void StreamingEngine::DrainAudioEncoder() {
if (!audioEncoder) return;
AMediaCodecBufferInfo info;
ssize_t outputIndex;
while ((outputIndex = AMediaCodec_dequeueOutputBuffer(audioEncoder, &info, 0)) >= 0) {
if (info.size > 0 && !(info.flags & AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG)) {
size_t outSize;
uint8_t* outputData = AMediaCodec_getOutputBuffer(audioEncoder, outputIndex, &outSize);
if (outputData) {
int64_t timestampMs = info.presentationTimeUs / 1000;
for (auto* sink : sinks) {
sink->SendAudioPacket(outputData + info.offset, info.size, timestampMs);
}
std::lock_guard<std::mutex> lock(statsMutex);
statsAudioBytes += info.size;
}
}
if (info.flags & AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG) {
// AAC config — sinks handle audio format via Open()
}
AMediaCodec_releaseOutputBuffer(audioEncoder, outputIndex, false);
}
}
void StreamingEngine::UpdateStats() {
auto now = std::chrono::steady_clock::now().time_since_epoch();
int64_t nowNs = std::chrono::duration_cast<std::chrono::nanoseconds>(now).count();
std::lock_guard<std::mutex> lock(statsMutex);
if (statsLastUpdateNs == 0) {
statsLastUpdateNs = nowNs;
return;
}
int64_t elapsedNs = nowNs - statsLastUpdateNs;
if (elapsedNs >= 1000000000LL) { // Every second
double elapsedSec = elapsedNs / 1000000000.0;
currentStats.videoBitrate = static_cast<int64_t>(statsVideoBytes * 8 / elapsedSec);
currentStats.audioBitrate = static_cast<int64_t>(statsAudioBytes * 8 / elapsedSec);
currentStats.fps = static_cast<int>(statsFrameCount / elapsedSec);
statsVideoBytes = 0;
statsAudioBytes = 0;
statsFrameCount = 0;
statsLastUpdateNs = nowNs;
if (statsCallback) {
statsCallback(currentStats);
}
}
}
/**
 * Queues a video frame for encoding. Non-blocking; called by the producer.
 *
 * The queue is now bounded: if the encoder thread falls behind, the newest
 * frame is dropped (its fence fd closed, droppedFrames incremented) instead
 * of letting the queue — and the set of open fence fds — grow without
 * limit during an encoder stall.
 *
 * NOTE(review): the engine does not AHardwareBuffer_acquire() `buffer`; the
 * producer must keep it alive until encoding — confirm the caller contract.
 */
void StreamingEngine::SubmitVideoFrame(AHardwareBuffer* buffer, int64_t timestampNs, int fenceFd) {
    if (!running.load()) {
        if (fenceFd >= 0) close(fenceFd);
        return;
    }
    VideoFrame frame;
    frame.buffer = buffer;
    frame.timestampNs = timestampNs;
    frame.fenceFd = fenceFd;
    // Enough headroom for a short stall without unbounded growth.
    constexpr size_t kMaxPendingVideoFrames = 8;
    std::lock_guard<std::mutex> lock(videoMutex);
    if (videoQueue.size() >= kMaxPendingVideoFrames) {
        if (frame.fenceFd >= 0) close(frame.fenceFd);
        // videoMutex -> statsMutex is the only nested order in this file,
        // so no lock-order cycle is possible.
        std::lock_guard<std::mutex> statsLock(statsMutex);
        currentStats.droppedFrames++;
        return;
    }
    videoQueue.push_back(frame);
}
/**
 * Queues one PCM chunk for AAC encoding. Non-blocking; called by the
 * producer. The data is copied, so the caller may reuse its buffer
 * immediately.
 *
 * Now guards against a null/empty payload: std::vector::assign over a null
 * pointer range was undefined behavior in the previous version.
 */
void StreamingEngine::SubmitAudioFrame(const uint8_t* pcmData, size_t pcmSize, int64_t timestampNs) {
    if (!running.load()) return;
    if (!pcmData || pcmSize == 0) return;
    AudioFrame frame;
    frame.pcmData.assign(pcmData, pcmData + pcmSize);
    frame.timestampNs = timestampNs;
    std::lock_guard<std::mutex> lock(audioMutex);
    audioQueue.push_back(std::move(frame));
}
/**
 * Stops encoding and streaming; blocks until the encoder thread has shut
 * down.
 *
 * Always joins a joinable thread, even when `running` is already false:
 * the encoder thread clears `running` itself on init failure, and the
 * previous early-return on !running left that thread un-joined — destroying
 * or reassigning the joinable std::thread then calls std::terminate.
 */
void StreamingEngine::Stop() {
    if (running.load()) {
        LOGI("Stopping streaming engine");
    }
    running.store(false);
    if (encoderThread.joinable()) {
        encoderThread.join();
        LOGI("Streaming engine stopped");
    }
}
// Registers the periodic stats callback (invoked from the encoder thread).
// NOTE(review): assignment is not synchronized with a running encoder
// thread — set callbacks before Start().
void StreamingEngine::SetStatsCallback(StatsCallback callback) {
    statsCallback = std::move(callback);
}
// Registers the error callback (invoked from the encoder thread on init or
// connection failures). Set before Start(); see note above SetStatsCallback.
void StreamingEngine::SetErrorCallback(ErrorCallback callback) {
    errorCallback = std::move(callback);
}
// Registers the buffer-released callback.
// NOTE(review): nothing in this file ever invokes bufferReleasedCallback —
// confirm whether producers rely on it before shipping.
void StreamingEngine::SetBufferReleasedCallback(BufferReleasedCallback callback) {
    bufferReleasedCallback = std::move(callback);
}

View File

@@ -0,0 +1,150 @@
#pragma once
#include "egl_context.h"
#include "rtmp_sink.h"
#include <media/NdkMediaCodec.h>
#include <media/NdkMediaFormat.h>
#include <android/hardware_buffer.h>
#include <android/native_window.h>
#include <atomic>
#include <cstdint>
#include <functional>
#include <mutex>
#include <string>
#include <thread>
#include <vector>
// One video frame handed from the producer to the encoder thread.
struct VideoFrame {
    AHardwareBuffer* buffer;  // not acquired by the engine — producer owns lifetime (TODO confirm contract)
    int64_t timestampNs;      // capture timestamp, nanoseconds
    int fenceFd; // -1 if no fence; engine waits on it, then closes/consumes it
};
// One PCM chunk queued for AAC encoding (data is copied on submit).
struct AudioFrame {
    std::vector<uint8_t> pcmData;  // interleaved PCM bytes
    int64_t timestampNs;           // capture timestamp, nanoseconds
};
// Rolling encoder statistics, recomputed roughly once per second.
struct StreamingStats {
    int64_t videoBitrate = 0;  // bits/second over the last interval
    int64_t audioBitrate = 0;  // bits/second over the last interval
    int fps = 0;               // encoded frames/second over the last interval
    int droppedFrames = 0;     // frames dropped by the submit path
};
/**
* Streaming engine: imports HardwareBuffers via EGL, encodes with AMediaCodec,
* and streams via RTMP to one or more destinations.
*
* All encoding happens in native code (zero-copy pipeline).
*/
class StreamingEngine {
public:
    using StatsCallback = std::function<void(const StreamingStats&)>;
    using ErrorCallback = std::function<void(int code, const std::string& message)>;
    using BufferReleasedCallback = std::function<void(int bufferIndex)>;
    StreamingEngine();
    ~StreamingEngine();
    /** Configure the engine. Must be called before Start(). */
    bool Configure(int width, int height, int videoBitrate, int audioBitrate,
                   int sampleRate, int channels, int keyframeInterval);
    /** Add an RTMP destination. Returns destination index.
     *  NOTE(review): appears intended to be called before Start() — the
     *  sink list is read without a lock by the encoder thread. */
    int AddDestination(const std::string& rtmpUrl);
    /** Start encoding and streaming. Init errors inside the encoder thread
     *  are reported asynchronously via the error callback. */
    bool Start();
    /** Submit a video frame from HardwareBuffer. Non-blocking.
     *  Safe to call concurrently with the encoder thread (mutex-guarded queue). */
    void SubmitVideoFrame(AHardwareBuffer* buffer, int64_t timestampNs, int fenceFd);
    /** Submit audio PCM data. Non-blocking; data is copied. */
    void SubmitAudioFrame(const uint8_t* pcmData, size_t pcmSize, int64_t timestampNs);
    /** Stop encoding and streaming. Blocks until clean shutdown. */
    void Stop();
    /** Set callbacks. All are invoked from the encoder thread; set before Start(). */
    void SetStatsCallback(StatsCallback callback);
    void SetErrorCallback(ErrorCallback callback);
    void SetBufferReleasedCallback(BufferReleasedCallback callback);
    bool IsRunning() const { return running.load(); }
private:
    // Encoder thread
    void EncoderThreadFunc();
    void ProcessVideoFrame(const VideoFrame& frame);
    void ProcessAudioFrame(const AudioFrame& frame);
    void DrainVideoEncoder();
    void DrainAudioEncoder();
    void UpdateStats();
    // Blit HardwareBuffer texture to encoder surface
    void BlitToEncoder(GLuint srcTexture, int64_t timestampNs);
    // Config (set via Configure(); defaults used otherwise)
    int width = 0;
    int height = 0;
    int videoBitrate = 6000000;
    int audioBitrate = 128000;
    int sampleRate = 48000;
    int channels = 2;
    int keyframeInterval = 2;
    int framerate = 30;  // fixed — not settable through Configure()
    // EGL
    EglContext eglContext;
    // Blit resources
    GLuint blitProgram = 0;
    GLuint blitVao = 0;
    GLuint blitVbo = 0;
    // Video encoder
    AMediaCodec* videoEncoder = nullptr;
    ANativeWindow* encoderSurface = nullptr;
    // Audio encoder
    AMediaCodec* audioEncoder = nullptr;
    // RTMP sinks (one per destination); raw owned pointers, freed in dtor
    std::vector<RtmpSink*> sinks;
    // Threading
    std::thread encoderThread;
    std::atomic<bool> running{false};
    // Frame queues (protected by mutex)
    std::mutex videoMutex;
    std::vector<VideoFrame> videoQueue;
    std::mutex audioMutex;
    std::vector<AudioFrame> audioQueue;
    // Stats (protected by statsMutex)
    std::mutex statsMutex;
    StreamingStats currentStats;
    int64_t statsVideoBytes = 0;
    int64_t statsAudioBytes = 0;
    int statsFrameCount = 0;
    int64_t statsLastUpdateNs = 0;
    // Start timestamp for relative timing (anchored by the first video frame;
    // accessed only from the encoder thread)
    int64_t startTimestampNs = 0;
    bool firstVideoFrame = true;
    // Callbacks
    StatsCallback statsCallback;
    ErrorCallback errorCallback;
    // NOTE(review): never invoked anywhere in streaming_engine.cpp — confirm
    // intended wiring before relying on it.
    BufferReleasedCallback bufferReleasedCallback;
    bool InitVideoEncoder();
    bool InitAudioEncoder();
    bool InitBlitResources();
    void ReleaseBlitResources();
};

View File

@@ -0,0 +1,164 @@
#ifndef __AMF_H__
#define __AMF_H__
/*
* Copyright (C) 2005-2008 Team XBMC
* http://www.xbmc.org
* Copyright (C) 2008-2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#include <stdint.h>
#ifndef TRUE
#define TRUE 1
#define FALSE 0
#endif
#ifdef __cplusplus
extern "C"
{
#endif
typedef enum
{ AMF_NUMBER = 0, AMF_BOOLEAN, AMF_STRING, AMF_OBJECT,
AMF_MOVIECLIP, /* reserved, not used */
AMF_NULL, AMF_UNDEFINED, AMF_REFERENCE, AMF_ECMA_ARRAY, AMF_OBJECT_END,
AMF_STRICT_ARRAY, AMF_DATE, AMF_LONG_STRING, AMF_UNSUPPORTED,
AMF_RECORDSET, /* reserved, not used */
AMF_XML_DOC, AMF_TYPED_OBJECT,
AMF_AVMPLUS, /* switch to AMF3 */
AMF_INVALID = 0xff
} AMFDataType;
typedef enum
{ AMF3_UNDEFINED = 0, AMF3_NULL, AMF3_FALSE, AMF3_TRUE,
AMF3_INTEGER, AMF3_DOUBLE, AMF3_STRING, AMF3_XML_DOC, AMF3_DATE,
AMF3_ARRAY, AMF3_OBJECT, AMF3_XML, AMF3_BYTE_ARRAY
} AMF3DataType;
typedef struct AVal
{
char *av_val;
int av_len;
} AVal;
#define AVC(str) {str,sizeof(str)-1}
#define AVMATCH(a1,a2) ((a1)->av_len == (a2)->av_len && !memcmp((a1)->av_val,(a2)->av_val,(a1)->av_len))
struct AMFObjectProperty;
typedef struct AMFObject
{
int o_num;
struct AMFObjectProperty *o_props;
} AMFObject;
typedef struct AMFObjectProperty
{
AVal p_name;
AMFDataType p_type;
union
{
double p_number;
AVal p_aval;
AMFObject p_object;
} p_vu;
int16_t p_UTCoffset;
} AMFObjectProperty;
char *AMF_EncodeString(char *output, char *outend, const AVal * str);
char *AMF_EncodeNumber(char *output, char *outend, double dVal);
char *AMF_EncodeInt16(char *output, char *outend, short nVal);
char *AMF_EncodeInt24(char *output, char *outend, int nVal);
char *AMF_EncodeInt32(char *output, char *outend, int nVal);
char *AMF_EncodeBoolean(char *output, char *outend, int bVal);
/* Shortcuts for AMFProp_Encode */
char *AMF_EncodeNamedString(char *output, char *outend, const AVal * name, const AVal * value);
char *AMF_EncodeNamedNumber(char *output, char *outend, const AVal * name, double dVal);
char *AMF_EncodeNamedBoolean(char *output, char *outend, const AVal * name, int bVal);
unsigned short AMF_DecodeInt16(const char *data);
unsigned int AMF_DecodeInt24(const char *data);
unsigned int AMF_DecodeInt32(const char *data);
void AMF_DecodeString(const char *data, AVal * str);
void AMF_DecodeLongString(const char *data, AVal * str);
int AMF_DecodeBoolean(const char *data);
double AMF_DecodeNumber(const char *data);
char *AMF_Encode(AMFObject * obj, char *pBuffer, char *pBufEnd);
char *AMF_EncodeEcmaArray(AMFObject *obj, char *pBuffer, char *pBufEnd);
char *AMF_EncodeArray(AMFObject *obj, char *pBuffer, char *pBufEnd);
int AMF_Decode(AMFObject * obj, const char *pBuffer, int nSize,
int bDecodeName);
int AMF_DecodeArray(AMFObject * obj, const char *pBuffer, int nSize,
int nArrayLen, int bDecodeName);
int AMF3_Decode(AMFObject * obj, const char *pBuffer, int nSize,
int bDecodeName);
void AMF_Dump(AMFObject * obj);
void AMF_Reset(AMFObject * obj);
void AMF_AddProp(AMFObject * obj, const AMFObjectProperty * prop);
int AMF_CountProp(AMFObject * obj);
AMFObjectProperty *AMF_GetProp(AMFObject * obj, const AVal * name,
int nIndex);
AMFDataType AMFProp_GetType(AMFObjectProperty * prop);
void AMFProp_SetNumber(AMFObjectProperty * prop, double dval);
void AMFProp_SetBoolean(AMFObjectProperty * prop, int bflag);
void AMFProp_SetString(AMFObjectProperty * prop, AVal * str);
void AMFProp_SetObject(AMFObjectProperty * prop, AMFObject * obj);
void AMFProp_GetName(AMFObjectProperty * prop, AVal * name);
void AMFProp_SetName(AMFObjectProperty * prop, AVal * name);
double AMFProp_GetNumber(AMFObjectProperty * prop);
int AMFProp_GetBoolean(AMFObjectProperty * prop);
void AMFProp_GetString(AMFObjectProperty * prop, AVal * str);
void AMFProp_GetObject(AMFObjectProperty * prop, AMFObject * obj);
int AMFProp_IsValid(AMFObjectProperty * prop);
char *AMFProp_Encode(AMFObjectProperty * prop, char *pBuffer, char *pBufEnd);
int AMF3Prop_Decode(AMFObjectProperty * prop, const char *pBuffer,
int nSize, int bDecodeName);
int AMFProp_Decode(AMFObjectProperty * prop, const char *pBuffer,
int nSize, int bDecodeName);
void AMFProp_Dump(AMFObjectProperty * prop);
void AMFProp_Reset(AMFObjectProperty * prop);
typedef struct AMF3ClassDef
{
AVal cd_name;
char cd_externalizable;
char cd_dynamic;
int cd_num;
AVal *cd_props;
} AMF3ClassDef;
void AMF3CD_AddProp(AMF3ClassDef * cd, AVal * prop);
AVal *AMF3CD_GetProp(AMF3ClassDef * cd, int idx);
#ifdef __cplusplus
}
#endif
#endif /* __AMF_H__ */

View File

@@ -0,0 +1,91 @@
/*
* Copyright (C) 2005-2008 Team XBMC
* http://www.xbmc.org
* Copyright (C) 2008-2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#ifndef __BYTES_H__
#define __BYTES_H__
#include <stdint.h>
#ifdef _WIN32
/* Windows is little endian only */
#define __LITTLE_ENDIAN 1234
#define __BIG_ENDIAN 4321
#define __BYTE_ORDER __LITTLE_ENDIAN
#define __FLOAT_WORD_ORDER __BYTE_ORDER
typedef unsigned char uint8_t;
#else /* !_WIN32 */
#include <sys/param.h>
#if defined(BYTE_ORDER) && !defined(__BYTE_ORDER)
#define __BYTE_ORDER BYTE_ORDER
#endif
#if defined(BIG_ENDIAN) && !defined(__BIG_ENDIAN)
#define __BIG_ENDIAN BIG_ENDIAN
#endif
#if defined(LITTLE_ENDIAN) && !defined(__LITTLE_ENDIAN)
#define __LITTLE_ENDIAN LITTLE_ENDIAN
#endif
#endif /* !_WIN32 */
/* define default endianness */
#ifndef __LITTLE_ENDIAN
#define __LITTLE_ENDIAN 1234
#endif
#ifndef __BIG_ENDIAN
#define __BIG_ENDIAN 4321
#endif
#ifndef __BYTE_ORDER
#warning "Byte order not defined on your system, assuming little endian!"
#define __BYTE_ORDER __LITTLE_ENDIAN
#endif
/* ok, we assume to have the same float word order and byte order if float word order is not defined */
#ifndef __FLOAT_WORD_ORDER
#warning "Float word order not defined, assuming the same as byte order!"
#define __FLOAT_WORD_ORDER __BYTE_ORDER
#endif
#if !defined(__BYTE_ORDER) || !defined(__FLOAT_WORD_ORDER)
#error "Undefined byte or float word order!"
#endif
#if __FLOAT_WORD_ORDER != __BIG_ENDIAN && __FLOAT_WORD_ORDER != __LITTLE_ENDIAN
#error "Unknown/unsupported float word order!"
#endif
#if __BYTE_ORDER != __BIG_ENDIAN && __BYTE_ORDER != __LITTLE_ENDIAN
#error "Unknown/unsupported byte order!"
#endif
#endif

View File

@@ -0,0 +1,402 @@
/* RTMPDump - Diffie-Hellmann Key Exchange
* Copyright (C) 2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <limits.h>
#ifdef USE_POLARSSL
#include <polarssl/dhm.h>
typedef mpi * MP_t;
#define MP_new(m) m = malloc(sizeof(mpi)); mpi_init(m)
#define MP_set_w(mpi, w) mpi_lset(mpi, w)
#define MP_cmp(u, v) mpi_cmp_mpi(u, v)
#define MP_set(u, v) mpi_copy(u, v)
#define MP_sub_w(mpi, w) mpi_sub_int(mpi, mpi, w)
#define MP_cmp_1(mpi) mpi_cmp_int(mpi, 1)
#define MP_modexp(r, y, q, p) mpi_exp_mod(r, y, q, p, NULL)
#define MP_free(mpi) mpi_free(mpi); free(mpi)
#define MP_gethex(u, hex, res) MP_new(u); res = mpi_read_string(u, 16, hex) == 0
#define MP_bytes(u) mpi_size(u)
#define MP_setbin(u,buf,len) mpi_write_binary(u,buf,len)
#define MP_getbin(u,buf,len) MP_new(u); mpi_read_binary(u,buf,len)
#define MP_setpg(dh, p, g) dh->p = p; dh->g = g
#define MP_setlength(dh, l) dh->length = l
#define MP_getp(dh) dh->p
#define MP_getpubkey(dh) dh->pub_key
typedef struct MDH {
MP_t p;
MP_t g;
MP_t pub_key;
MP_t priv_key;
long length;
dhm_context ctx;
} MDH;
#define MDH_new() calloc(1,sizeof(MDH))
#define MDH_free(vp) {MDH *_dh = vp; dhm_free(&_dh->ctx); MP_free(_dh->p); MP_free(_dh->g); MP_free(_dh->pub_key); MP_free(_dh->priv_key); free(_dh);}
static int MDH_generate_key(MDH *dh)
{
unsigned char out[2];
MP_set(&dh->ctx.P, dh->p);
MP_set(&dh->ctx.G, dh->g);
dh->ctx.len = 128;
dhm_make_public(&dh->ctx, 1024, out, 1, havege_random, &RTMP_TLS_ctx->hs);
MP_new(dh->pub_key);
MP_new(dh->priv_key);
MP_set(dh->pub_key, &dh->ctx.GX);
MP_set(dh->priv_key, &dh->ctx.X);
return 1;
}
static int MDH_compute_key(uint8_t *secret, size_t len, MP_t pub, MDH *dh)
{
MP_set(&dh->ctx.GY, pub);
dhm_calc_secret(&dh->ctx, secret, &len);
return 0;
}
#elif defined(USE_GNUTLS)
#include <gmp.h>
#include <nettle/bignum.h>
#include <gnutls/crypto.h>
typedef mpz_ptr MP_t;
#define MP_new(m) m = malloc(sizeof(*m)); mpz_init2(m, 1)
#define MP_set_w(mpi, w) mpz_set_ui(mpi, w)
#define MP_cmp(u, v) mpz_cmp(u, v)
#define MP_set(u, v) mpz_set(u, v)
#define MP_sub_w(mpi, w) mpz_sub_ui(mpi, mpi, w)
#define MP_cmp_1(mpi) mpz_cmp_ui(mpi, 1)
#define MP_modexp(r, y, q, p) mpz_powm(r, y, q, p)
#define MP_free(mpi) mpz_clear(mpi); free(mpi)
#define MP_gethex(u, hex, res) u = malloc(sizeof(*u)); mpz_init2(u, 1); res = (mpz_set_str(u, hex, 16) == 0)
#define MP_bytes(u) (mpz_sizeinbase(u, 2) + 7) / 8
#define MP_setbin(u,buf,len) nettle_mpz_get_str_256(len,buf,u)
#define MP_getbin(u,buf,len) u = malloc(sizeof(*u)); mpz_init2(u, 1); nettle_mpz_set_str_256_u(u,len,buf)
#define MP_setpg(dh, p, g) dh->p = p; dh->g = g
#define MP_setlength(dh, l) dh->length = l
#define MP_getp(dh) dh->p
#define MP_getpubkey(dh) dh->pub_key
typedef struct MDH {
MP_t p;
MP_t g;
MP_t pub_key;
MP_t priv_key;
long length;
} MDH;
#define MDH_new() calloc(1,sizeof(MDH))
#define MDH_free(dh) do {MP_free(((MDH*)(dh))->p); MP_free(((MDH*)(dh))->g); MP_free(((MDH*)(dh))->pub_key); MP_free(((MDH*)(dh))->priv_key); free(dh);} while(0)
static int MDH_generate_key(MDH *dh)
{
int num_bytes;
uint32_t seed;
gmp_randstate_t rs;
num_bytes = (mpz_sizeinbase(dh->p, 2) + 7) / 8 - 1;
if (num_bytes <= 0 || num_bytes > 18000)
return 0;
dh->priv_key = calloc(1, sizeof(*dh->priv_key));
if (!dh->priv_key)
return 0;
mpz_init2(dh->priv_key, 1);
gnutls_rnd(GNUTLS_RND_RANDOM, &seed, sizeof(seed));
gmp_randinit_mt(rs);
gmp_randseed_ui(rs, seed);
mpz_urandomb(dh->priv_key, rs, num_bytes);
gmp_randclear(rs);
dh->pub_key = calloc(1, sizeof(*dh->pub_key));
if (!dh->pub_key)
return 0;
mpz_init2(dh->pub_key, 1);
if (!dh->pub_key) {
mpz_clear(dh->priv_key);
free(dh->priv_key);
return 0;
}
mpz_powm(dh->pub_key, dh->g, dh->priv_key, dh->p);
return 1;
}
static int MDH_compute_key(uint8_t *secret, size_t len, MP_t pub, MDH *dh)
{
mpz_ptr k;
int num_bytes;
num_bytes = (mpz_sizeinbase(dh->p, 2) + 7) / 8;
if (num_bytes <= 0 || num_bytes > 18000)
return -1;
k = calloc(1, sizeof(*k));
if (!k)
return -1;
mpz_init2(k, 1);
mpz_powm(k, pub, dh->priv_key, dh->p);
nettle_mpz_get_str_256(len, secret, k);
mpz_clear(k);
free(k);
/* return the length of the shared secret key like DH_compute_key */
return len;
}
#else /* USE_OPENSSL */
/* OpenSSL backend: map the generic MP_ / MDH_ helper macros used by the
 * DH code below onto OpenSSL BIGNUM / DH primitives. */
#include <openssl/bn.h>
#include <openssl/dh.h>
typedef BIGNUM * MP_t;
#define MP_new(m) m = BN_new()
#define MP_set_w(mpi, w) BN_set_word(mpi, w)
#define MP_cmp(u, v) BN_cmp(u, v)
#define MP_set(u, v) BN_copy(u, v)
#define MP_sub_w(mpi, w) BN_sub_word(mpi, w)
#define MP_cmp_1(mpi) BN_cmp(mpi, BN_value_one())
/* modular exponentiation needs a scratch BN_CTX; created/freed per call */
#define MP_modexp(r, y, q, p) do {BN_CTX *ctx = BN_CTX_new(); BN_mod_exp(r, y, q, p, ctx); BN_CTX_free(ctx);} while(0)
#define MP_free(mpi) BN_free(mpi)
#define MP_gethex(u, hex, res) res = BN_hex2bn(&u, hex)
#define MP_bytes(u) BN_num_bytes(u)
#define MP_setbin(u,buf,len) BN_bn2bin(u,buf)
#define MP_getbin(u,buf,len) u = BN_bin2bn(buf,len,0)
#define MDH DH
#define MDH_new() DH_new()
#define MDH_free(dh) DH_free(dh)
#define MDH_generate_key(dh) DH_generate_key(dh)
#define MDH_compute_key(secret, seclen, pub, dh) DH_compute_key(secret, pub, dh)
/* OpenSSL 1.1.0 made DH opaque: use the accessor API there, and poke the
 * struct fields directly on older releases. */
#if OPENSSL_VERSION_NUMBER >= 0x10100000
#define MP_setpg(dh, p, g) DH_set0_pqg(dh, p, NULL, g)
#define MP_setlength(dh, l) DH_set_length(dh, l)
#define MP_getp(dh) DH_get0_p(dh)
#define MP_getpubkey(dh) DH_get0_pub_key(dh)
#else
#define MP_setpg(dh, p, g) dh->p = p; dh->g = g
#define MP_setlength(dh, l) dh->length = l
#define MP_getp(dh) dh->p
#define MP_getpubkey(dh) dh->pub_key
#endif
#endif
#include "log.h"
#include "dhgroups.h"
/* RFC 2631, Section 2.1.5, http://www.ietf.org/rfc/rfc2631.txt */
/* Sanity-check a peer's DH public key y against the prime p and,
 * optionally, the subgroup order q (per RFC 2631, section 2.1.5).
 * Returns TRUE if the key looks acceptable, FALSE otherwise.
 * NOTE(review): the range actually enforced is 1 <= y <= p-1, slightly
 * laxer than the "at least 2" / "at most p-2" error texts below; left
 * unchanged to match upstream librtmp behaviour. */
static int
isValidPublicKey(MP_t y, MP_t p, MP_t q)
{
int ret = TRUE;
MP_t bn;
assert(y);
MP_new(bn);
assert(bn);
/* y must lie in [2,p-1] */
MP_set_w(bn, 1);
if (MP_cmp(y, bn) < 0)
{
RTMP_Log(RTMP_LOGERROR, "DH public key must be at least 2");
ret = FALSE;
goto failed;
}
/* bn = p-1 (upstream comment said p-2, but only 1 is subtracted;
 * the guard below therefore rejects y > p-1) */
MP_set(bn, p);
MP_sub_w(bn, 1);
if (MP_cmp(y, bn) > 0)
{
RTMP_Log(RTMP_LOGERROR, "DH public key must be at most p-2");
ret = FALSE;
goto failed;
}
/* Verify with Sophie-Germain prime
*
* This is a nice test to make sure the public key position is calculated
* correctly. This test will fail in about 50% of the cases if applied to
* random data.
*/
if (q)
{
/* y must fulfill y^q mod p = 1; a mismatch is logged but NOT fatal */
MP_modexp(bn, y, q, p);
if (MP_cmp_1(bn) != 0)
{
RTMP_Log(RTMP_LOGWARNING, "DH public key does not fulfill y^q mod p = 1");
}
}
failed:
MP_free(bn);
return ret;
}
/* Allocate and initialise a DH context using the well-known 1024-bit
 * MODP group P1024 (see dhgroups.h) with generator 2.
 * nKeyBits: requested private-key length in bits.
 * Returns the new context, or NULL (0) on failure.
 *
 * Fix vs. upstream: g (and a partially parsed p) were leaked when
 * MP_gethex failed; locals are now NULL-initialised and released on the
 * error path. Ownership of p and g transfers to dh via MP_setpg only on
 * the success path, so the frees below cannot double-free. */
static MDH *
DHInit(int nKeyBits)
{
  size_t res;
  MDH *dh = MDH_new();
  MP_t g = 0, p = 0;

  if (!dh)
    goto failed;

  MP_new(g);
  if (!g)
    goto failed;

  MP_gethex(p, P1024, res);	/* prime P1024, see dhgroups.h */
  if (!res)
    goto failed;

  MP_set_w(g, 2);	/* base 2 */

  /* dh owns p and g from here on; they are freed by MDH_free */
  MP_setpg(dh, p, g);
  MP_setlength(dh, nKeyBits);
  return dh;

failed:
  /* release anything not yet owned by dh */
  if (p)
    MP_free(p);
  if (g)
    MP_free(g);
  if (dh)
    MDH_free(dh);
  return 0;
}
/* Generate a fresh DH keypair in dh, retrying until the public key
 * passes isValidPublicKey() against the P1024 subgroup order q.
 * Returns non-zero on success, 0 on failure.
 * NOTE(review): MP_gethex failure is only caught by assert(), which is
 * compiled out under NDEBUG — q1 could then be used invalid/uninitialised. */
static int
DHGenerateKey(MDH *dh)
{
MP_t q1;
size_t res;
if (!dh)
return 0;
MP_gethex(q1, Q1024, res);
assert(res);
do
{
if (MDH_generate_key(dh))
{
MP_t key = (MP_t)MP_getpubkey(dh);
MP_t p = (MP_t)MP_getp(dh);
/* res != 0 keeps the loop from retrying; 0 would loop again */
res = isValidPublicKey(key, p, q1);
}
else
{
/* pre-1.1 OpenSSL: clear the half-generated keypair before giving up;
 * OpenSSL 1.1+ manages these fields internally (DH is opaque) */
#if !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000
MP_free(dh->pub_key);
MP_free(dh->priv_key);
dh->pub_key = dh->priv_key = 0;
#endif
res = 0;
break;
}
} while (!res);
MP_free(q1);
return res;
}
/* fill pubkey with the public key in BIG ENDIAN order
* 00 00 00 00 00 x1 x2 x3 .....
*/
/* Serialise dh's public key into pubkey as a big-endian value,
 * left-padded with zero bytes to exactly nPubkeyLen bytes
 * ("00 00 ... x1 x2 x3 ...").
 * Returns 1 on success, 0 if dh has no public key or it does not fit. */
static int
DHGetPublicKey(MDH *dh, uint8_t *pubkey, size_t nPubkeyLen)
{
  MP_t pub;
  int keyBytes;

  if (!dh)
    return 0;
  pub = (MP_t)MP_getpubkey(dh);
  if (!pub)
    return 0;

  keyBytes = MP_bytes(pub);
  if (keyBytes <= 0 || keyBytes > (int) nPubkeyLen)
    return 0;

  /* zero-fill the buffer, then drop the key into its low-order end */
  memset(pubkey, 0, nPubkeyLen);
  MP_setbin(pub, pubkey + (nPubkeyLen - keyBytes), keyBytes);
  return 1;
}
#if 0 /* unused */
/* Mirror of DHGetPublicKey for the private half of the keypair:
 * big-endian, zero-padded to nPrivkeyLen bytes. Compiled out; kept for
 * reference only. */
static int
DHGetPrivateKey(MDH *dh, uint8_t *privkey, size_t nPrivkeyLen)
{
if (!dh || !dh->priv_key)
return 0;
int len = MP_bytes(dh->priv_key);
if (len <= 0 || len > (int) nPrivkeyLen)
return 0;
memset(privkey, 0, nPrivkeyLen);
MP_setbin(dh->priv_key, privkey + (nPrivkeyLen - len), len);
return 1;
}
#endif
/* computes the shared secret key from the private MDH value and the
* other party's public key (pubkey)
*/
/* Compute the shared secret from our private DH value and the peer's
 * public key (pubkey: big-endian, nPubkeyLen bytes). The result is
 * written into secret, which must hold at least nPubkeyLen bytes.
 * Returns the secret length in bytes, or -1 on error (bad arguments,
 * allocation/parse failure, or an invalid peer public key).
 *
 * Fix vs. upstream: MP_gethex failure was only guarded by assert(),
 * which vanishes under NDEBUG and would let an invalid q1 reach
 * isValidPublicKey(); it is now an explicit runtime check with cleanup. */
static int
DHComputeSharedSecretKey(MDH *dh, uint8_t *pubkey, size_t nPubkeyLen,
			 uint8_t *secret)
{
  MP_t q1 = NULL, pubkeyBn = NULL;
  size_t len;
  int res;

  if (!dh || !secret || nPubkeyLen >= INT_MAX)
    return -1;

  MP_getbin(pubkeyBn, pubkey, nPubkeyLen);
  if (!pubkeyBn)
    return -1;

  /* Sophie-Germain subgroup order used by the public-key sanity check */
  MP_gethex(q1, Q1024, len);
  if (!len)
    {
      if (q1)
	MP_free(q1);
      MP_free(pubkeyBn);
      return -1;
    }

  if (isValidPublicKey(pubkeyBn, (MP_t)MP_getp(dh), q1))
    res = MDH_compute_key(secret, nPubkeyLen, pubkeyBn, dh);
  else
    res = -1;

  MP_free(q1);
  MP_free(pubkeyBn);
  return res;
}

View File

@@ -0,0 +1,199 @@
/* librtmp - Diffie-Hellmann Key Exchange
* Copyright (C) 2009 Andrej Stepanchuk
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
/* from RFC 3526, see http://www.ietf.org/rfc/rfc3526.txt */
/* 2^768 - 2 ^704 - 1 + 2^64 * { [2^638 pi] + 149686 } */
#define P768 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A63A3620FFFFFFFFFFFFFFFF"
/* 2^1024 - 2^960 - 1 + 2^64 * { [2^894 pi] + 129093 } */
#define P1024 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381" \
"FFFFFFFFFFFFFFFF"
/* Largest prime factor of the group order: */
#define Q1024 \
"7FFFFFFFFFFFFFFFE487ED5110B4611A62633145C06E0E68" \
"948127044533E63A0105DF531D89CD9128A5043CC71A026E" \
"F7CA8CD9E69D218D98158536F92F8A1BA7F09AB6B6A8E122" \
"F242DABB312F3F637A262174D31BF6B585FFAE5B7A035BF6" \
"F71C35FDAD44CFD2D74F9208BE258FF324943328F67329C0" \
"FFFFFFFFFFFFFFFF"
/* 2^1536 - 2^1472 - 1 + 2^64 * { [2^1406 pi] + 741804 } */
#define P1536 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA237327FFFFFFFFFFFFFFFF"
/* 2^2048 - 2^1984 - 1 + 2^64 * { [2^1918 pi] + 124476 } */
#define P2048 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AACAA68FFFFFFFFFFFFFFFF"
/* 2^3072 - 2^3008 - 1 + 2^64 * { [2^2942 pi] + 1690314 } */
#define P3072 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64" \
"ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7" \
"ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6B" \
"F12FFA06D98A0864D87602733EC86A64521F2B18177B200C" \
"BBE117577A615D6C770988C0BAD946E208E24FA074E5AB31" \
"43DB5BFCE0FD108E4B82D120A93AD2CAFFFFFFFFFFFFFFFF"
/* 2^4096 - 2^4032 - 1 + 2^64 * { [2^3966 pi] + 240904 } */
#define P4096 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64" \
"ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7" \
"ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6B" \
"F12FFA06D98A0864D87602733EC86A64521F2B18177B200C" \
"BBE117577A615D6C770988C0BAD946E208E24FA074E5AB31" \
"43DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D7" \
"88719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA" \
"2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6" \
"287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED" \
"1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA9" \
"93B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934063199" \
"FFFFFFFFFFFFFFFF"
/* 2^6144 - 2^6080 - 1 + 2^64 * { [2^6014 pi] + 929484 } */
#define P6144 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64" \
"ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7" \
"ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6B" \
"F12FFA06D98A0864D87602733EC86A64521F2B18177B200C" \
"BBE117577A615D6C770988C0BAD946E208E24FA074E5AB31" \
"43DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D7" \
"88719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA" \
"2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6" \
"287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED" \
"1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA9" \
"93B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934028492" \
"36C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BD" \
"F8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831" \
"179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1B" \
"DB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF" \
"5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6" \
"D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F3" \
"23A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AA" \
"CC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE328" \
"06A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55C" \
"DA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE" \
"12BF2D5B0B7474D6E694F91E6DCC4024FFFFFFFFFFFFFFFF"
/* 2^8192 - 2^8128 - 1 + 2^64 * { [2^8062 pi] + 4743158 } */
#define P8192 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64" \
"ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7" \
"ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6B" \
"F12FFA06D98A0864D87602733EC86A64521F2B18177B200C" \
"BBE117577A615D6C770988C0BAD946E208E24FA074E5AB31" \
"43DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D7" \
"88719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA" \
"2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6" \
"287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED" \
"1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA9" \
"93B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934028492" \
"36C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BD" \
"F8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831" \
"179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1B" \
"DB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF" \
"5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6" \
"D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F3" \
"23A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AA" \
"CC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE328" \
"06A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55C" \
"DA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE" \
"12BF2D5B0B7474D6E694F91E6DBE115974A3926F12FEE5E4" \
"38777CB6A932DF8CD8BEC4D073B931BA3BC832B68D9DD300" \
"741FA7BF8AFC47ED2576F6936BA424663AAB639C5AE4F568" \
"3423B4742BF1C978238F16CBE39D652DE3FDB8BEFC848AD9" \
"22222E04A4037C0713EB57A81A23F0C73473FC646CEA306B" \
"4BCBC8862F8385DDFA9D4B7FA2C087E879683303ED5BDD3A" \
"062B3CF5B3A278A66D2A13F83F44F82DDF310EE074AB6A36" \
"4597E899A0255DC164F31CC50846851DF9AB48195DED7EA1" \
"B1D510BD7EE74D73FAF36BC31ECFA268359046F4EB879F92" \
"4009438B481C6CD7889A002ED5EE382BC9190DA6FC026E47" \
"9558E4475677E9AA9E3050E2765694DFC81F56E880B96E71" \
"60C980DD98EDD3DFFFFFFFFFFFFFFFFF"

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,47 @@
#ifndef __RTMP_HTTP_H__
#define __RTMP_HTTP_H__
/*
* Copyright (C) 2010 Howard Chu
* Copyright (C) 2010 Antti Ajanki
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
typedef enum {
HTTPRES_OK, /* result OK */
HTTPRES_OK_NOT_MODIFIED, /* not modified since last request */
HTTPRES_NOT_FOUND, /* not found */
HTTPRES_BAD_REQUEST, /* client error */
HTTPRES_SERVER_ERROR, /* server reported an error */
HTTPRES_REDIRECTED, /* resource has been moved */
HTTPRES_LOST_CONNECTION /* connection lost while waiting for data */
} HTTPResult;
/* Per-request state passed to HTTP_get(). */
struct HTTP_ctx {
char *date; /* presumably the response Date header value — TODO confirm in http.c */
int size; /* response body size in bytes — TODO confirm in http.c */
int status; /* HTTP status code of the response (by field name; verify in http.c) */
void *data; /* opaque user pointer handed through to the read callback */
};
typedef size_t (HTTP_read_callback)(void *ptr, size_t size, size_t nmemb, void *stream);
HTTPResult HTTP_get(struct HTTP_ctx *http, const char *url, HTTP_read_callback *cb);
#endif

View File

@@ -0,0 +1,69 @@
/*
* Copyright (C) 2008-2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#ifndef __RTMP_LOG_H__
#define __RTMP_LOG_H__
#include <stdio.h>
#include <stdarg.h>
#include <stdint.h>
#ifdef __cplusplus
extern "C" {
#endif
/* Enable this to get full debugging output */
/* #define _DEBUG */
#ifdef _DEBUG
#undef NODEBUG
#endif
typedef enum
{ RTMP_LOGCRIT=0, RTMP_LOGERROR, RTMP_LOGWARNING, RTMP_LOGINFO,
RTMP_LOGDEBUG, RTMP_LOGDEBUG2, RTMP_LOGALL
} RTMP_LogLevel;
extern RTMP_LogLevel RTMP_debuglevel;
typedef void (RTMP_LogCallback)(int level, const char *fmt, va_list);
void RTMP_LogSetCallback(RTMP_LogCallback *cb);
void RTMP_LogSetOutput(FILE *file);
#ifdef __GNUC__
void RTMP_LogPrintf(const char *format, ...) __attribute__ ((__format__ (__printf__, 1, 2)));
void RTMP_LogStatus(const char *format, ...) __attribute__ ((__format__ (__printf__, 1, 2)));
void RTMP_Log(int level, const char *format, ...) __attribute__ ((__format__ (__printf__, 2, 3)));
#else
void RTMP_LogPrintf(const char *format, ...);
void RTMP_LogStatus(const char *format, ...);
void RTMP_Log(int level, const char *format, ...);
#endif
void RTMP_LogHex(int level, const uint8_t *data, unsigned long len);
void RTMP_LogHexString(int level, const uint8_t *data, unsigned long len);
void RTMP_LogSetLevel(RTMP_LogLevel lvl);
RTMP_LogLevel RTMP_LogGetLevel(void);
#ifdef __cplusplus
}
#endif
#endif

View File

@@ -0,0 +1,378 @@
#ifndef __RTMP_H__
#define __RTMP_H__
/*
* Copyright (C) 2005-2008 Team XBMC
* http://www.xbmc.org
* Copyright (C) 2008-2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#if !defined(NO_CRYPTO) && !defined(CRYPTO)
#define CRYPTO
#endif
#include <errno.h>
#include <stdint.h>
#include <stddef.h>
#include "amf.h"
#ifdef __cplusplus
extern "C"
{
#endif
#define RTMP_LIB_VERSION 0x020300 /* 2.3 */
#define RTMP_FEATURE_HTTP 0x01
#define RTMP_FEATURE_ENC 0x02
#define RTMP_FEATURE_SSL 0x04
#define RTMP_FEATURE_MFP 0x08 /* not yet supported */
#define RTMP_FEATURE_WRITE 0x10 /* publish, not play */
#define RTMP_FEATURE_HTTP2 0x20 /* server-side rtmpt */
#define RTMP_PROTOCOL_UNDEFINED -1
#define RTMP_PROTOCOL_RTMP 0
#define RTMP_PROTOCOL_RTMPE RTMP_FEATURE_ENC
#define RTMP_PROTOCOL_RTMPT RTMP_FEATURE_HTTP
#define RTMP_PROTOCOL_RTMPS RTMP_FEATURE_SSL
#define RTMP_PROTOCOL_RTMPTE (RTMP_FEATURE_HTTP|RTMP_FEATURE_ENC)
#define RTMP_PROTOCOL_RTMPTS (RTMP_FEATURE_HTTP|RTMP_FEATURE_SSL)
#define RTMP_PROTOCOL_RTMFP RTMP_FEATURE_MFP
#define RTMP_DEFAULT_CHUNKSIZE 128
/* needs to fit largest number of bytes recv() may return */
#define RTMP_BUFFER_CACHE_SIZE (16*1024)
#define RTMP_CHANNELS 65600
extern const char RTMPProtocolStringsLower[][7];
extern const AVal RTMP_DefaultFlashVer;
extern int RTMP_ctrlC;
uint32_t RTMP_GetTime(void);
/* RTMP_PACKET_TYPE_... 0x00 */
#define RTMP_PACKET_TYPE_CHUNK_SIZE 0x01
/* RTMP_PACKET_TYPE_... 0x02 */
#define RTMP_PACKET_TYPE_BYTES_READ_REPORT 0x03
#define RTMP_PACKET_TYPE_CONTROL 0x04
#define RTMP_PACKET_TYPE_SERVER_BW 0x05
#define RTMP_PACKET_TYPE_CLIENT_BW 0x06
/* RTMP_PACKET_TYPE_... 0x07 */
#define RTMP_PACKET_TYPE_AUDIO 0x08
#define RTMP_PACKET_TYPE_VIDEO 0x09
/* RTMP_PACKET_TYPE_... 0x0A */
/* RTMP_PACKET_TYPE_... 0x0B */
/* RTMP_PACKET_TYPE_... 0x0C */
/* RTMP_PACKET_TYPE_... 0x0D */
/* RTMP_PACKET_TYPE_... 0x0E */
#define RTMP_PACKET_TYPE_FLEX_STREAM_SEND 0x0F
#define RTMP_PACKET_TYPE_FLEX_SHARED_OBJECT 0x10
#define RTMP_PACKET_TYPE_FLEX_MESSAGE 0x11
#define RTMP_PACKET_TYPE_INFO 0x12
#define RTMP_PACKET_TYPE_SHARED_OBJECT 0x13
#define RTMP_PACKET_TYPE_INVOKE 0x14
/* RTMP_PACKET_TYPE_... 0x15 */
#define RTMP_PACKET_TYPE_FLASH_VIDEO 0x16
#define RTMP_MAX_HEADER_SIZE 18
#define RTMP_PACKET_SIZE_LARGE 0
#define RTMP_PACKET_SIZE_MEDIUM 1
#define RTMP_PACKET_SIZE_SMALL 2
#define RTMP_PACKET_SIZE_MINIMUM 3
typedef struct RTMPChunk
{
int c_headerSize;
int c_chunkSize;
char *c_chunk;
char c_header[RTMP_MAX_HEADER_SIZE];
} RTMPChunk;
/* A single RTMP message, possibly reassembled from several chunks. */
typedef struct RTMPPacket
{
uint8_t m_headerType; /* chunk header format, one of RTMP_PACKET_SIZE_* above — TODO confirm */
uint8_t m_packetType; /* message type id, one of RTMP_PACKET_TYPE_* above */
uint8_t m_hasAbsTimestamp; /* timestamp absolute or relative? */
int m_nChannel; /* channel/chunk-stream number (bounded by RTMP_CHANNELS) */
uint32_t m_nTimeStamp; /* timestamp */
int32_t m_nInfoField2; /* last 4 bytes in a long header */
uint32_t m_nBodySize; /* total payload size in bytes */
uint32_t m_nBytesRead; /* body bytes received so far; packet complete when equal to m_nBodySize (see RTMPPacket_IsReady) */
RTMPChunk *m_chunk; /* optional per-chunk bookkeeping */
char *m_body; /* payload buffer */
} RTMPPacket;
typedef struct RTMPSockBuf
{
int sb_socket;
int sb_size; /* number of unprocessed bytes in buffer */
char *sb_start; /* pointer into sb_pBuffer of next byte to process */
char sb_buf[RTMP_BUFFER_CACHE_SIZE]; /* data read from socket */
int sb_timedout;
void *sb_ssl;
} RTMPSockBuf;
void RTMPPacket_Reset(RTMPPacket *p);
void RTMPPacket_Dump(RTMPPacket *p);
int RTMPPacket_Alloc(RTMPPacket *p, uint32_t nSize);
void RTMPPacket_Free(RTMPPacket *p);
#define RTMPPacket_IsReady(a) ((a)->m_nBytesRead == (a)->m_nBodySize)
typedef struct RTMP_LNK
{
AVal hostname;
AVal sockshost;
AVal playpath0; /* parsed from URL */
AVal playpath; /* passed in explicitly */
AVal tcUrl;
AVal swfUrl;
AVal pageUrl;
AVal app;
AVal auth;
AVal flashVer;
AVal subscribepath;
AVal usherToken;
AVal token;
AVal pubUser;
AVal pubPasswd;
AMFObject extras;
int edepth;
int seekTime;
int stopTime;
#define RTMP_LF_AUTH 0x0001 /* using auth param */
#define RTMP_LF_LIVE 0x0002 /* stream is live */
#define RTMP_LF_SWFV 0x0004 /* do SWF verification */
#define RTMP_LF_PLST 0x0008 /* send playlist before play */
#define RTMP_LF_BUFX 0x0010 /* toggle stream on BufferEmpty msg */
#define RTMP_LF_FTCU 0x0020 /* free tcUrl on close */
#define RTMP_LF_FAPU 0x0040 /* free app on close */
int lFlags;
int swfAge;
int protocol;
int timeout; /* connection timeout in seconds */
int pFlags; /* unused, but kept to avoid breaking ABI */
unsigned short socksport;
unsigned short port;
#ifdef CRYPTO
#define RTMP_SWF_HASHLEN 32
void *dh; /* for encryption */
void *rc4keyIn;
void *rc4keyOut;
uint32_t SWFSize;
uint8_t SWFHash[RTMP_SWF_HASHLEN];
char SWFVerificationResponse[RTMP_SWF_HASHLEN+10];
#endif
} RTMP_LNK;
/* state for read() wrapper */
typedef struct RTMP_READ
{
char *buf;
char *bufpos;
unsigned int buflen;
uint32_t timestamp;
uint8_t dataType;
uint8_t flags;
#define RTMP_READ_HEADER 0x01
#define RTMP_READ_RESUME 0x02
#define RTMP_READ_NO_IGNORE 0x04
#define RTMP_READ_GOTKF 0x08
#define RTMP_READ_GOTFLVK 0x10
#define RTMP_READ_SEEKING 0x20
int8_t status;
#define RTMP_READ_COMPLETE -3
#define RTMP_READ_ERROR -2
#define RTMP_READ_EOF -1
#define RTMP_READ_IGNORE 0
/* if bResume == TRUE */
uint8_t initialFrameType;
uint32_t nResumeTS;
char *metaHeader;
char *initialFrame;
uint32_t nMetaHeaderSize;
uint32_t nInitialFrameSize;
uint32_t nIgnoredFrameCounter;
uint32_t nIgnoredFlvFrameCounter;
} RTMP_READ;
typedef struct RTMP_METHOD
{
AVal name;
int num;
} RTMP_METHOD;
typedef struct RTMP
{
int m_inChunkSize;
int m_outChunkSize;
int m_nBWCheckCounter;
int m_nBytesIn;
int m_nBytesInSent;
int m_nBufferMS;
int m_stream_id; /* returned in _result from createStream */
int m_mediaChannel;
uint32_t m_mediaStamp;
uint32_t m_pauseStamp;
int m_pausing;
int m_nServerBW;
int m_nClientBW;
uint8_t m_nClientBW2;
uint8_t m_bPlaying;
uint8_t m_bSendEncoding;
uint8_t m_bSendCounter;
int m_numInvokes;
int m_numCalls;
RTMP_METHOD *m_methodCalls; /* remote method calls queue */
int m_channelsAllocatedIn;
int m_channelsAllocatedOut;
RTMPPacket **m_vecChannelsIn;
RTMPPacket **m_vecChannelsOut;
int *m_channelTimestamp; /* abs timestamp of last packet */
double m_fAudioCodecs; /* audioCodecs for the connect packet */
double m_fVideoCodecs; /* videoCodecs for the connect packet */
double m_fEncoding; /* AMF0 or AMF3 */
double m_fDuration; /* duration of stream in seconds */
int m_msgCounter; /* RTMPT stuff */
int m_polling;
int m_resplen;
int m_unackd;
AVal m_clientID;
RTMP_READ m_read;
RTMPPacket m_write;
RTMPSockBuf m_sb;
RTMP_LNK Link;
} RTMP;
int RTMP_ParseURL(const char *url, int *protocol, AVal *host,
unsigned int *port, AVal *playpath, AVal *app);
void RTMP_ParsePlaypath(AVal *in, AVal *out);
void RTMP_SetBufferMS(RTMP *r, int size);
void RTMP_UpdateBufferMS(RTMP *r);
int RTMP_SetOpt(RTMP *r, const AVal *opt, AVal *arg);
int RTMP_SetupURL(RTMP *r, char *url);
void RTMP_SetupStream(RTMP *r, int protocol,
AVal *hostname,
unsigned int port,
AVal *sockshost,
AVal *playpath,
AVal *tcUrl,
AVal *swfUrl,
AVal *pageUrl,
AVal *app,
AVal *auth,
AVal *swfSHA256Hash,
uint32_t swfSize,
AVal *flashVer,
AVal *subscribepath,
AVal *usherToken,
int dStart,
int dStop, int bLiveStream, long int timeout);
int RTMP_Connect(RTMP *r, RTMPPacket *cp);
struct sockaddr;
int RTMP_Connect0(RTMP *r, struct sockaddr *svc);
int RTMP_Connect1(RTMP *r, RTMPPacket *cp);
int RTMP_Serve(RTMP *r);
int RTMP_TLS_Accept(RTMP *r, void *ctx);
int RTMP_ReadPacket(RTMP *r, RTMPPacket *packet);
int RTMP_SendPacket(RTMP *r, RTMPPacket *packet, int queue);
int RTMP_SendChunk(RTMP *r, RTMPChunk *chunk);
int RTMP_IsConnected(RTMP *r);
int RTMP_Socket(RTMP *r);
int RTMP_IsTimedout(RTMP *r);
double RTMP_GetDuration(RTMP *r);
int RTMP_ToggleStream(RTMP *r);
int RTMP_ConnectStream(RTMP *r, int seekTime);
int RTMP_ReconnectStream(RTMP *r, int seekTime);
void RTMP_DeleteStream(RTMP *r);
int RTMP_GetNextMediaPacket(RTMP *r, RTMPPacket *packet);
int RTMP_ClientPacket(RTMP *r, RTMPPacket *packet);
void RTMP_Init(RTMP *r);
void RTMP_Close(RTMP *r);
RTMP *RTMP_Alloc(void);
void RTMP_Free(RTMP *r);
void RTMP_EnableWrite(RTMP *r);
void *RTMP_TLS_AllocServerContext(const char* cert, const char* key);
void RTMP_TLS_FreeServerContext(void *ctx);
int RTMP_LibVersion(void);
void RTMP_UserInterrupt(void); /* user typed Ctrl-C */
int RTMP_SendCtrl(RTMP *r, short nType, unsigned int nObject,
unsigned int nTime);
/* caller probably doesn't know current timestamp, should
* just use RTMP_Pause instead
*/
int RTMP_SendPause(RTMP *r, int DoPause, int dTime);
int RTMP_Pause(RTMP *r, int DoPause);
int RTMP_FindFirstMatchingProperty(AMFObject *obj, const AVal *name,
AMFObjectProperty * p);
int RTMPSockBuf_Fill(RTMPSockBuf *sb);
int RTMPSockBuf_Send(RTMPSockBuf *sb, const char *buf, int len);
int RTMPSockBuf_Close(RTMPSockBuf *sb);
int RTMP_SendCreateStream(RTMP *r);
int RTMP_SendSeek(RTMP *r, int dTime);
int RTMP_SendServerBW(RTMP *r);
int RTMP_SendClientBW(RTMP *r);
void RTMP_DropRequest(RTMP *r, int i, int freeit);
int RTMP_Read(RTMP *r, char *buf, int size);
int RTMP_Write(RTMP *r, const char *buf, int size);
/* hashswf.c */
int RTMP_HashSWF(const char *url, unsigned int *size, unsigned char *hash,
int age);
#ifdef __cplusplus
};
#endif
#endif

View File

@@ -0,0 +1,141 @@
#ifndef __RTMP_SYS_H__
#define __RTMP_SYS_H__
/*
* Copyright (C) 2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#ifdef _WIN32
#include <winsock2.h>
#include <ws2tcpip.h>
#ifdef _MSC_VER /* MSVC */
#if _MSC_VER < 1900
#define snprintf _snprintf
#define vsnprintf _vsnprintf
#endif
#define strcasecmp _stricmp
#define strncasecmp _strnicmp
#endif
#define GetSockError() WSAGetLastError()
#define SetSockError(e) WSASetLastError(e)
#define setsockopt(a,b,c,d,e) (setsockopt)(a,b,c,(const char *)d,(int)e)
#define EWOULDBLOCK WSAETIMEDOUT /* we don't use nonblocking, but we do use timeouts */
#define sleep(n) Sleep(n*1000)
#define msleep(n) Sleep(n)
#define SET_RCVTIMEO(tv,s) int tv = s*1000
#else /* !_WIN32 */
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/times.h>
#include <netdb.h>
#include <unistd.h>
#include <netinet/in.h>
#include <netinet/tcp.h>
#include <arpa/inet.h>
#define GetSockError() errno
#define SetSockError(e) errno = e
#undef closesocket
#define closesocket(s) close(s)
#define msleep(n) usleep(n*1000)
#define SET_RCVTIMEO(tv,s) struct timeval tv = {s,0}
#endif
#include "rtmp.h"
#ifdef USE_POLARSSL
#include <polarssl/version.h>
#include <polarssl/net.h>
#include <polarssl/ssl.h>
#include <polarssl/havege.h>
#if POLARSSL_VERSION_NUMBER < 0x01010000
#define havege_random havege_rand
#endif
#if POLARSSL_VERSION_NUMBER >= 0x01020000
#define SSL_SET_SESSION(S,resume,timeout,ctx) ssl_set_session(S,ctx)
#else
#define SSL_SET_SESSION(S,resume,timeout,ctx) ssl_set_session(S,resume,timeout,ctx)
#endif
typedef struct tls_ctx {
havege_state hs;
ssl_session ssn;
} tls_ctx;
typedef struct tls_server_ctx {
havege_state *hs;
x509_cert cert;
rsa_context key;
ssl_session ssn;
const char *dhm_P, *dhm_G;
} tls_server_ctx;
#define TLS_CTX tls_ctx *
#define TLS_client(ctx,s) s = malloc(sizeof(ssl_context)); ssl_init(s);\
ssl_set_endpoint(s, SSL_IS_CLIENT); ssl_set_authmode(s, SSL_VERIFY_NONE);\
ssl_set_rng(s, havege_random, &ctx->hs);\
ssl_set_ciphersuites(s, ssl_default_ciphersuites);\
SSL_SET_SESSION(s, 1, 600, &ctx->ssn)
#define TLS_server(ctx,s) s = malloc(sizeof(ssl_context)); ssl_init(s);\
ssl_set_endpoint(s, SSL_IS_SERVER); ssl_set_authmode(s, SSL_VERIFY_NONE);\
ssl_set_rng(s, havege_random, ((tls_server_ctx*)ctx)->hs);\
ssl_set_ciphersuites(s, ssl_default_ciphersuites);\
SSL_SET_SESSION(s, 1, 600, &((tls_server_ctx*)ctx)->ssn);\
ssl_set_own_cert(s, &((tls_server_ctx*)ctx)->cert, &((tls_server_ctx*)ctx)->key);\
ssl_set_dh_param(s, ((tls_server_ctx*)ctx)->dhm_P, ((tls_server_ctx*)ctx)->dhm_G)
#define TLS_setfd(s,fd) ssl_set_bio(s, net_recv, &fd, net_send, &fd)
#define TLS_connect(s) ssl_handshake(s)
#define TLS_accept(s) ssl_handshake(s)
#define TLS_read(s,b,l) ssl_read(s,(unsigned char *)b,l)
#define TLS_write(s,b,l) ssl_write(s,(unsigned char *)b,l)
#define TLS_shutdown(s) ssl_close_notify(s)
#define TLS_close(s) ssl_free(s); free(s)
#elif defined(USE_GNUTLS)
#include <gnutls/gnutls.h>
typedef struct tls_ctx {
gnutls_certificate_credentials_t cred;
gnutls_priority_t prios;
} tls_ctx;
#define TLS_CTX tls_ctx *
#define TLS_client(ctx,s) gnutls_init((gnutls_session_t *)(&s), GNUTLS_CLIENT); gnutls_priority_set(s, ctx->prios); gnutls_credentials_set(s, GNUTLS_CRD_CERTIFICATE, ctx->cred)
#define TLS_server(ctx,s) gnutls_init((gnutls_session_t *)(&s), GNUTLS_SERVER); gnutls_priority_set_direct(s, "NORMAL", NULL); gnutls_credentials_set(s, GNUTLS_CRD_CERTIFICATE, ctx)
#define TLS_setfd(s,fd) gnutls_transport_set_ptr(s, (gnutls_transport_ptr_t)(long)fd)
#define TLS_connect(s) gnutls_handshake(s)
#define TLS_accept(s) gnutls_handshake(s)
#define TLS_read(s,b,l) gnutls_record_recv(s,b,l)
#define TLS_write(s,b,l) gnutls_record_send(s,b,l)
#define TLS_shutdown(s) gnutls_bye(s, GNUTLS_SHUT_RDWR)
#define TLS_close(s) gnutls_deinit(s)
#else /* USE_OPENSSL */
#define TLS_CTX SSL_CTX *
#define TLS_client(ctx,s) s = SSL_new(ctx)
#define TLS_server(ctx,s) s = SSL_new(ctx)
#define TLS_setfd(s,fd) SSL_set_fd(s,fd)
#define TLS_connect(s) SSL_connect(s)
#define TLS_accept(s) SSL_accept(s)
#define TLS_read(s,b,l) SSL_read(s,b,l)
#define TLS_write(s,b,l) SSL_write(s,b,l)
#define TLS_shutdown(s) SSL_shutdown(s)
#define TLS_close(s) SSL_free(s)
#endif
#endif

View File

@@ -16,7 +16,7 @@ import com.omixlab.lckcontrol.data.local.entity.StreamPlanEntity
StreamPlanEntity::class,
StreamDestinationEntity::class,
],
version = 3,
version = 5,
exportSchema = false,
)
abstract class LckDatabase : RoomDatabase() {
@@ -96,5 +96,18 @@ abstract class LckDatabase : RoomDatabase() {
db.execSQL("ALTER TABLE stream_destinations ADD COLUMN linkedAccountId TEXT NOT NULL DEFAULT ''")
}
}
val MIGRATION_3_4 = object : Migration(3, 4) {
override fun migrate(db: SupportSQLiteDatabase) {
db.execSQL("ALTER TABLE stream_plans ADD COLUMN executionMode TEXT NOT NULL DEFAULT 'IN_GAME'")
}
}
val MIGRATION_4_5 = object : Migration(4, 5) {
override fun migrate(db: SupportSQLiteDatabase) {
db.execSQL("ALTER TABLE linked_accounts ADD COLUMN isEnabled INTEGER NOT NULL DEFAULT 1")
db.execSQL("ALTER TABLE stream_plans ADD COLUMN gameId TEXT NOT NULL DEFAULT ''")
}
}
}
}

View File

@@ -33,4 +33,7 @@ interface LinkedAccountDao {
@Query("DELETE FROM linked_accounts WHERE serviceId = :serviceId")
suspend fun deleteByService(serviceId: String)
@Query("UPDATE linked_accounts SET isEnabled = :isEnabled WHERE id = :id")
suspend fun setEnabled(id: String, isEnabled: Boolean)
}

View File

@@ -10,4 +10,5 @@ data class LinkedAccountEntity(
val displayName: String,
val accountId: String,
val avatarUrl: String? = null,
val isEnabled: Boolean = true,
)

View File

@@ -8,5 +8,7 @@ data class StreamPlanEntity(
@PrimaryKey val planId: String,
val name: String,
val status: String = "DRAFT",
val executionMode: String = "IN_GAME",
val gameId: String = "",
val createdAt: Long = System.currentTimeMillis(),
)

View File

@@ -67,6 +67,8 @@ data class LinkedAccountResponse(
@JsonClass(generateAdapter = true)
data class CreateStreamPlanRequest(
val name: String,
val executionMode: String? = null,
val gameId: String? = null,
val destinations: List<CreateDestinationRequest>,
)
@@ -85,6 +87,8 @@ data class StreamPlanResponse(
val id: String,
val name: String,
val status: String,
val executionMode: String? = null,
val gameId: String? = null,
val createdAt: String,
val updatedAt: String,
val destinations: List<StreamDestinationResponse>,

View File

@@ -26,6 +26,8 @@ class AccountRepository @Inject constructor(
/** Fetch accounts from backend and sync to Room cache */
suspend fun syncAccounts() {
val remote = apiService.getLinkedAccounts()
// Read local entities to preserve isEnabled across sync
val localMap = accountDao.getAll().associateBy { it.id }
val entities = remote.map { account ->
LinkedAccountEntity(
id = account.id,
@@ -33,12 +35,12 @@ class AccountRepository @Inject constructor(
displayName = account.displayName,
accountId = account.accountId,
avatarUrl = account.avatarUrl,
isEnabled = localMap[account.id]?.isEnabled ?: true,
)
}
// Get current local accounts to detect removals
val local = accountDao.getAll()
// Detect removals
val remoteIds = entities.map { it.id }.toSet()
for (localAccount in local) {
for (localAccount in localMap.values) {
if (localAccount.id !in remoteIds) {
accountDao.deleteById(localAccount.id)
}
@@ -48,6 +50,10 @@ class AccountRepository @Inject constructor(
}
}
suspend fun setAccountEnabled(id: String, enabled: Boolean) {
accountDao.setEnabled(id, enabled)
}
/** Get YouTube OAuth URL from backend (for Custom Tabs) */
suspend fun getYouTubeAuthUrl(): String {
val response = apiService.getYouTubeAuthUrl()
@@ -85,5 +91,6 @@ class AccountRepository @Inject constructor(
accountId = accountId,
avatarUrl = avatarUrl,
isAuthenticated = true, // Backend manages auth state
isEnabled = isEnabled,
)
}

View File

@@ -42,9 +42,16 @@ class StreamPlanRepository @Inject constructor(
}
/** Create plan via backend and cache locally */
suspend fun createPlan(name: String, destinations: List<StreamDestination>): StreamPlan {
suspend fun createPlan(
name: String,
destinations: List<StreamDestination>,
executionMode: String = "IN_GAME",
gameId: String = "",
): StreamPlan {
val request = CreateStreamPlanRequest(
name = name,
executionMode = executionMode,
gameId = gameId.ifBlank { null },
destinations = destinations.map { dest ->
CreateDestinationRequest(
linkedAccountId = dest.linkedAccountId,
@@ -96,7 +103,13 @@ class StreamPlanRepository @Inject constructor(
}
private suspend fun cacheRemotePlan(remote: StreamPlanResponse) {
val planEntity = StreamPlanEntity(planId = remote.id, name = remote.name, status = remote.status)
val planEntity = StreamPlanEntity(
planId = remote.id,
name = remote.name,
status = remote.status,
executionMode = remote.executionMode ?: "IN_GAME",
gameId = remote.gameId ?: "",
)
val destEntities = remote.destinations.map { d ->
StreamDestinationEntity(
id = d.id,
@@ -121,6 +134,8 @@ class StreamPlanRepository @Inject constructor(
planId = plan.planId,
name = plan.name,
status = plan.status,
executionMode = plan.executionMode,
gameId = plan.gameId,
destinations = destinations.map { it.toStreamDestination() },
)

View File

@@ -20,7 +20,7 @@ object DatabaseModule {
@Singleton
fun provideDatabase(@ApplicationContext context: Context): LckDatabase =
Room.databaseBuilder(context, LckDatabase::class.java, "lck_control.db")
.addMigrations(LckDatabase.MIGRATION_1_2, LckDatabase.MIGRATION_2_3)
.addMigrations(LckDatabase.MIGRATION_1_2, LckDatabase.MIGRATION_2_3, LckDatabase.MIGRATION_3_4, LckDatabase.MIGRATION_4_5)
.build()
@Provides

View File

@@ -26,6 +26,9 @@ import com.omixlab.lckcontrol.shared.LinkedAccount
import com.omixlab.lckcontrol.shared.StreamDestination
import com.omixlab.lckcontrol.shared.StreamPlan
import com.omixlab.lckcontrol.shared.StreamPlanConfig
import com.omixlab.lckcontrol.shared.StreamingConfig
import com.omixlab.lckcontrol.streaming.StreamingManager
import com.omixlab.lckcontrol.streaming.StreamingState
import dagger.hilt.android.AndroidEntryPoint
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
@@ -48,15 +51,18 @@ class LckControlService : Service() {
private const val NOTIFICATION_ID = 1
private const val QUEST_APP_ID = "25653777174321448"
private const val TOKEN_REFRESH_INTERVAL_MS = 60_000L
private const val ACTION_BIND_STREAMING = "com.omixlab.lckcontrol.BIND_STREAMING"
}
@Inject lateinit var accountRepository: AccountRepository
@Inject lateinit var streamPlanRepository: StreamPlanRepository
@Inject lateinit var tokenStore: TokenStore
@Inject lateinit var apiService: LckApiService
@Inject lateinit var streamingManager: StreamingManager
private val serviceScope = CoroutineScope(SupervisorJob() + Dispatchers.Main)
private val clientTracker = ClientTracker()
private var streamingServiceImpl: LckStreamingServiceImpl? = null
private val callbacks = object : RemoteCallbackList<ILckControlCallback>() {
override fun onCallbackDied(callback: ILckControlCallback, cookie: Any?) {
val uid = cookie as? Int ?: return
@@ -95,13 +101,20 @@ class LckControlService : Service() {
// ── Stream plans ────────────────────────────────────
override fun createStreamPlan(config: StreamPlanConfig): StreamPlan = runBlocking {
val plan = streamPlanRepository.createPlan(config.name, config.destinations)
val plan = streamPlanRepository.createPlan(
name = config.name,
destinations = config.destinations,
executionMode = config.executionMode,
gameId = config.gameId,
)
broadcastPlansChanged()
plan
}
override fun createDefaultPlan(clientName: String): StreamPlan = runBlocking {
val accounts = accountRepository.getAccounts()
val accounts = accountRepository.getAccounts().filter { it.isEnabled }
val gameId = clientTracker.getAll()
.find { it.clientName == clientName }?.packageName ?: ""
val destinations = accounts.map { account ->
StreamDestination(
service = account.serviceId,
@@ -110,7 +123,11 @@ class LckControlService : Service() {
privacyStatus = "unlisted",
)
}
val plan = streamPlanRepository.createPlan("$clientName Stream", destinations)
val plan = streamPlanRepository.createPlan(
name = "$clientName Stream",
destinations = destinations,
gameId = gameId,
)
broadcastPlansChanged()
plan
}
@@ -137,6 +154,17 @@ class LckControlService : Service() {
try {
streamPlanRepository.startPlan(planId)
val updated = streamPlanRepository.getPlan(planId)
// If APP_STREAMING mode, start the streaming engine
if (updated?.executionMode == "APP_STREAMING") {
streamingManager.startStreaming(
plan = updated,
config = StreamingConfig(),
width = 1920,
height = 1080,
)
}
if (updated != null) broadcastPlanUpdated(updated)
true
} catch (_: Exception) { false }
@@ -147,6 +175,11 @@ class LckControlService : Service() {
if (plan.status == "ENDED") return@runBlocking true
if (plan.status != "LIVE" && plan.status != "READY") return@runBlocking false
try {
// Stop streaming engine if running
if (plan.executionMode == "APP_STREAMING") {
streamingManager.stopStreaming()
}
streamPlanRepository.endPlan(planId)
val updated = streamPlanRepository.getPlan(planId)
if (updated != null) broadcastPlanUpdated(updated)
@@ -222,11 +255,37 @@ class LckControlService : Service() {
}
}
}
// Forward streaming state changes to AIDL callbacks
serviceScope.launch {
streamingManager.state.collect { state ->
streamingServiceImpl?.broadcastStateChanged(state)
}
}
serviceScope.launch {
streamingManager.stats.collect { stats ->
streamingServiceImpl?.broadcastStats(
stats.videoBitrate, stats.audioBitrate, stats.fps, stats.droppedFrames,
)
}
}
}
override fun onBind(intent: Intent?): IBinder = binder
override fun onBind(intent: Intent?): IBinder? {
return when (intent?.action) {
ACTION_BIND_STREAMING -> {
if (streamingServiceImpl == null) {
streamingServiceImpl = LckStreamingServiceImpl(streamingManager)
}
streamingServiceImpl!!.asBinder()
}
else -> binder
}
}
override fun onDestroy() {
streamingManager.stopStreaming()
streamingServiceImpl?.kill()
serviceScope.cancel()
callbacks.kill()
super.onDestroy()

View File

@@ -0,0 +1,138 @@
package com.omixlab.lckcontrol.service
import android.hardware.HardwareBuffer
import android.os.ParcelFileDescriptor
import android.os.RemoteCallbackList
import android.util.Log
import com.omixlab.lckcontrol.shared.ILckStreamingCallback
import com.omixlab.lckcontrol.shared.ILckStreamingService
import com.omixlab.lckcontrol.streaming.StreamingManager
import com.omixlab.lckcontrol.streaming.StreamingState
/**
 * AIDL implementation for ILckStreamingService.
 * Bridges AIDL IPC calls to the StreamingManager.
 * Frame submission methods are one-way for a non-blocking game render thread.
 *
 * State/stats observation requires a coroutine scope, so LckControlService collects
 * the StreamingManager flows and invokes the broadcast* helpers here.
 */
class LckStreamingServiceImpl(
    private val streamingManager: StreamingManager,
) : ILckStreamingService.Stub() {

    companion object {
        private const val TAG = "LckStreamingServiceImpl"
    }

    // Remote listeners; RemoteCallbackList unregisters dead binders automatically.
    private val callbacks = RemoteCallbackList<ILckStreamingCallback>()

    // ── ILckStreamingService ────────────────────────────────────────────

    /** Register the shared texture pool the game renders into. */
    override fun registerTexturePool(
        buffers: Array<HardwareBuffer>,
        width: Int,
        height: Int,
        format: Int,
    ) {
        Log.d(TAG, "registerTexturePool: ${buffers.size} buffers, ${width}x$height")
        streamingManager.registerTexturePool(buffers, width, height, format)
    }

    override fun unregisterTexturePool() {
        Log.d(TAG, "unregisterTexturePool")
        streamingManager.unregisterTexturePool()
    }

    /**
     * Forward one video frame to the engine.
     * The fence fd is detached from the ParcelFileDescriptor so its ownership
     * transfers out of the Parcel; -1 means "no fence".
     */
    override fun submitVideoFrame(
        bufferIndex: Int,
        timestampNs: Long,
        gpuFence: ParcelFileDescriptor?,
    ) {
        val fenceFd = gpuFence?.detachFd() ?: -1
        streamingManager.submitVideoFrame(bufferIndex, timestampNs, fenceFd)
    }

    /**
     * Forward PCM audio to the engine.
     * Note: sampleRate/channels/bitsPerSample are accepted for the AIDL contract but
     * are currently not forwarded — StreamingManager takes only data + timestamp.
     */
    override fun submitAudioFrame(
        pcmData: ByteArray,
        timestampNs: Long,
        sampleRate: Int,
        channels: Int,
        bitsPerSample: Int,
    ) {
        streamingManager.submitAudioFrame(pcmData, timestampNs)
    }

    override fun isStreaming(): Boolean = streamingManager.isStreaming()

    override fun registerStreamingCallback(callback: ILckStreamingCallback) {
        callbacks.register(callback)
    }

    override fun unregisterStreamingCallback(callback: ILckStreamingCallback) {
        callbacks.unregister(callback)
    }

    // ── Broadcast helpers (called from LckControlService coroutine scope) ──

    fun broadcastStateChanged(state: StreamingState) {
        val stateStr = state.name
        broadcast { it.onStreamingStateChanged(stateStr) }
    }

    fun broadcastStats(videoBitrate: Long, audioBitrate: Long, fps: Int, droppedFrames: Int) {
        broadcast { it.onStreamingStats(videoBitrate, audioBitrate, fps, droppedFrames) }
    }

    fun broadcastError(code: Int, message: String) {
        broadcast { it.onStreamingError(code, message) }
    }

    fun broadcastBufferReleased(bufferIndex: Int) {
        broadcast { it.onBufferReleased(bufferIndex) }
    }

    /** Release the callback list; call once on service teardown. */
    fun kill() {
        callbacks.kill()
    }

    /**
     * Deliver one event to every registered callback, swallowing per-callback
     * failures (e.g. a dying binder) so one bad client cannot break the rest.
     * finishBroadcast() is guaranteed by the finally block even if iteration throws.
     */
    private inline fun broadcast(action: (ILckStreamingCallback) -> Unit) {
        val count = callbacks.beginBroadcast()
        try {
            for (i in 0 until count) {
                try {
                    action(callbacks.getBroadcastItem(i))
                } catch (_: Exception) {
                }
            }
        } finally {
            callbacks.finishBroadcast()
        }
    }
}

View File

@@ -0,0 +1,112 @@
package com.omixlab.lckcontrol.streaming
import android.hardware.HardwareBuffer
import android.util.Log
/**
 * Thin JNI wrapper around the C++ StreamingEngine.
 * All encoding, muxing, and RTMP streaming happens in native code (zero-copy pipeline).
 *
 * Lifecycle: create() -> addDestination()* -> start() -> submit*Frame() ... stop() -> destroy().
 * NOTE(review): nativePtr is a plain var with no synchronization — this assumes all
 * calls are made from one thread or are externally serialized; confirm against callers.
 */
class NativeStreamingEngine {
    companion object {
        private const val TAG = "NativeStreamingEngine"
        init {
            // Loads liblck_streaming.so once per process; throws UnsatisfiedLinkError
            // if the native library is missing for this ABI.
            System.loadLibrary("lck_streaming")
        }
    }

    // Opaque pointer to the native engine instance; 0 means "no engine created".
    private var nativePtr: Long = 0

    // Listener hooks fired from native code via the onNative* JNI callbacks below.
    var onStats: ((StreamingStats) -> Unit)? = null
    var onError: ((Int, String) -> Unit)? = null
    var onBufferReleased: ((Int) -> Unit)? = null

    /**
     * Allocate the native engine with the given encoder configuration.
     * Safe to call again: an existing engine is destroyed first.
     */
    fun create(
        width: Int,
        height: Int,
        videoBitrate: Int,
        audioBitrate: Int,
        sampleRate: Int,
        channels: Int,
        keyframeInterval: Int,
    ) {
        if (nativePtr != 0L) {
            Log.w(TAG, "Engine already created, destroying first")
            destroy()
        }
        nativePtr = nativeCreate(width, height, videoBitrate, audioBitrate,
            sampleRate, channels, keyframeInterval)
    }

    /** Register an RTMP output URL; returns the native destination index. Requires create(). */
    fun addDestination(rtmpUrl: String): Int {
        check(nativePtr != 0L) { "Engine not created" }
        return nativeAddDestination(nativePtr, rtmpUrl)
    }

    /** Start the native pipeline; returns false if the engine failed to start. Requires create(). */
    fun start(): Boolean {
        check(nativePtr != 0L) { "Engine not created" }
        return nativeStart(nativePtr)
    }

    /**
     * Submit one video frame; silently dropped if the engine is not created.
     * fenceFd is a GPU sync fence fd or -1 for none — presumably the native side
     * takes ownership and closes it; TODO confirm against the C++ implementation.
     */
    fun submitVideoFrame(hardwareBuffer: HardwareBuffer, timestampNs: Long, fenceFd: Int) {
        if (nativePtr == 0L) return
        nativeSubmitVideoFrame(nativePtr, hardwareBuffer, timestampNs, fenceFd)
    }

    /** Submit one PCM audio frame; silently dropped if the engine is not created. */
    fun submitAudioFrame(pcmData: ByteArray, timestampNs: Long) {
        if (nativePtr == 0L) return
        nativeSubmitAudioFrame(nativePtr, pcmData, timestampNs)
    }

    /** Stop the native pipeline; the engine stays allocated until destroy(). */
    fun stop() {
        if (nativePtr == 0L) return
        nativeStop(nativePtr)
    }

    /** Free the native engine; idempotent (nativePtr is zeroed after the first call). */
    fun destroy() {
        if (nativePtr != 0L) {
            nativeDestroy(nativePtr)
            nativePtr = 0
        }
    }

    /** True while the native engine reports it is running; false when not created. */
    fun isRunning(): Boolean {
        if (nativePtr == 0L) return false
        return nativeIsRunning(nativePtr)
    }

    // Called from native code (JNI callbacks).
    // Do NOT rename these methods or change their signatures — the native side
    // resolves them by name/signature through JNI.
    @Suppress("unused")
    private fun onNativeStats(videoBitrate: Long, audioBitrate: Long, fps: Int, droppedFrames: Int) {
        onStats?.invoke(StreamingStats(videoBitrate, audioBitrate, fps, droppedFrames))
    }

    @Suppress("unused")
    private fun onNativeError(code: Int, message: String) {
        Log.e(TAG, "Native error $code: $message")
        onError?.invoke(code, message)
    }

    @Suppress("unused")
    private fun onNativeBufferReleased(bufferIndex: Int) {
        onBufferReleased?.invoke(bufferIndex)
    }

    // Native methods — resolved against liblck_streaming by name/signature.
    private external fun nativeCreate(
        width: Int, height: Int,
        videoBitrate: Int, audioBitrate: Int,
        sampleRate: Int, channels: Int,
        keyframeInterval: Int,
    ): Long
    private external fun nativeAddDestination(ptr: Long, rtmpUrl: String): Int
    private external fun nativeStart(ptr: Long): Boolean
    private external fun nativeSubmitVideoFrame(ptr: Long, hardwareBuffer: HardwareBuffer, timestampNs: Long, fenceFd: Int)
    private external fun nativeSubmitAudioFrame(ptr: Long, pcmData: ByteArray, timestampNs: Long)
    private external fun nativeStop(ptr: Long)
    private external fun nativeDestroy(ptr: Long)
    private external fun nativeIsRunning(ptr: Long): Boolean
}

View File

@@ -0,0 +1,156 @@
package com.omixlab.lckcontrol.streaming
import android.hardware.HardwareBuffer
import android.util.Log
import com.omixlab.lckcontrol.shared.StreamPlan
import com.omixlab.lckcontrol.shared.StreamingConfig
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.StateFlow
import kotlinx.coroutines.flow.asStateFlow
import javax.inject.Inject
import javax.inject.Singleton
// Lifecycle states of the streaming pipeline, published via StreamingManager.state.
// IDLE -> STARTING -> LIVE -> STOPPING -> IDLE; ERROR is reachable from start/runtime failures.
enum class StreamingState {
    IDLE, STARTING, LIVE, STOPPING, ERROR
}
/**
 * High-level streaming lifecycle manager.
 * Bridges stream plan configuration to the native streaming engine.
 * Stream keys and RTMP URLs stay within the app process — never exposed via AIDL.
 */
@Singleton
class StreamingManager @Inject constructor() {

    companion object {
        private const val TAG = "StreamingManager"
    }

    private var engine: NativeStreamingEngine? = null
    private var texturePoolBuffers: Array<HardwareBuffer>? = null

    /** Invoked when the native engine releases a pooled buffer back to the producer. */
    var onBufferReleased: ((Int) -> Unit)? = null

    private val _state = MutableStateFlow(StreamingState.IDLE)
    val state: StateFlow<StreamingState> = _state.asStateFlow()

    private val _stats = MutableStateFlow(StreamingStats())
    val stats: StateFlow<StreamingStats> = _stats.asStateFlow()

    private val _error = MutableStateFlow<String?>(null)
    val error: StateFlow<String?> = _error.asStateFlow()

    /**
     * Start streaming for a plan with APP_STREAMING execution mode.
     * RTMP URLs are constructed internally from the plan's destinations.
     * On any failure the state moves to ERROR and `error` holds a message.
     */
    fun startStreaming(plan: StreamPlan, config: StreamingConfig, width: Int, height: Int) {
        if (_state.value != StreamingState.IDLE) {
            Log.w(TAG, "Cannot start streaming, current state: ${_state.value}")
            return
        }
        // Only destinations with full RTMP credentials can be streamed to.
        val destinations = plan.destinations.filter {
            it.rtmpUrl.isNotBlank() && it.streamKey.isNotBlank()
        }
        if (destinations.isEmpty()) {
            _error.value = "No destinations with RTMP credentials"
            _state.value = StreamingState.ERROR
            return
        }
        _state.value = StreamingState.STARTING
        _error.value = null
        // Kept outside the try so a partially-created engine can be destroyed on failure.
        var eng: NativeStreamingEngine? = null
        try {
            eng = NativeStreamingEngine()
            eng.create(
                width = width,
                height = height,
                videoBitrate = config.videoBitrate,
                audioBitrate = config.audioBitrate,
                sampleRate = config.audioSampleRate,
                channels = config.audioChannels,
                keyframeInterval = config.keyFrameInterval,
            )
            // Add RTMP destinations — stream keys stay in-process
            for (dest in destinations) {
                val fullUrl = "${dest.rtmpUrl}/${dest.streamKey}"
                eng.addDestination(fullUrl)
                Log.d(TAG, "Added destination: ${dest.service}")
            }
            eng.onStats = { s ->
                _stats.value = s
            }
            eng.onError = { code, message ->
                Log.e(TAG, "Streaming error $code: $message")
                _error.value = message
                _state.value = StreamingState.ERROR
            }
            // Forward native buffer-release notifications so the producer can recycle buffers.
            eng.onBufferReleased = { index -> onBufferReleased?.invoke(index) }
            if (eng.start()) {
                engine = eng
                _state.value = StreamingState.LIVE
                Log.i(TAG, "Streaming started with ${destinations.size} destinations")
            } else {
                eng.destroy()
                _error.value = "Failed to start streaming engine"
                _state.value = StreamingState.ERROR
            }
        } catch (e: Exception) {
            Log.e(TAG, "Failed to start streaming", e)
            // BUGFIX: destroy a partially-created engine so native resources don't leak
            // when create()/addDestination()/start() throws. destroy() is idempotent.
            eng?.destroy()
            _error.value = e.message ?: "Unknown error"
            _state.value = StreamingState.ERROR
        }
    }

    /**
     * Register texture pool buffers from the game.
     * Buffers are stored for reference — the native engine receives individual
     * buffers via submitVideoFrame.
     */
    fun registerTexturePool(buffers: Array<HardwareBuffer>, width: Int, height: Int, format: Int) {
        texturePoolBuffers = buffers
        Log.d(TAG, "Texture pool registered: ${buffers.size} buffers, ${width}x${height}")
    }

    fun unregisterTexturePool() {
        texturePoolBuffers = null
        Log.d(TAG, "Texture pool unregistered")
    }

    /** Forward a video frame from the game to the native engine (bounds-checked). */
    fun submitVideoFrame(bufferIndex: Int, timestampNs: Long, fenceFd: Int) {
        val buffers = texturePoolBuffers ?: return
        if (bufferIndex < 0 || bufferIndex >= buffers.size) return
        engine?.submitVideoFrame(buffers[bufferIndex], timestampNs, fenceFd)
    }

    /** Forward audio PCM from the game to the native engine. */
    fun submitAudioFrame(pcmData: ByteArray, timestampNs: Long) {
        engine?.submitAudioFrame(pcmData, timestampNs)
    }

    /** Stop streaming and release all resources. */
    fun stopStreaming() {
        // ROBUSTNESS: previously only LIVE/ERROR could stop, so a caller tearing
        // down during STARTING leaked the engine. Now only IDLE/STOPPING are no-ops.
        when (_state.value) {
            StreamingState.IDLE, StreamingState.STOPPING -> return
            else -> Unit
        }
        _state.value = StreamingState.STOPPING
        engine?.let { eng ->
            eng.stop()
            eng.destroy()
        }
        engine = null
        _state.value = StreamingState.IDLE
        _stats.value = StreamingStats()
        Log.i(TAG, "Streaming stopped")
    }

    fun isStreaming(): Boolean = _state.value == StreamingState.LIVE
}

View File

@@ -0,0 +1,8 @@
package com.omixlab.lckcontrol.streaming
// Snapshot of streaming throughput, produced by the native engine's stats callback.
data class StreamingStats(
    val videoBitrate: Long = 0,   // presumably bits/sec — TODO confirm unit with native side
    val audioBitrate: Long = 0,   // presumably bits/sec — TODO confirm unit with native side
    val fps: Int = 0,             // encoded frames per second
    val droppedFrames: Int = 0,   // frames dropped since the stream started — TODO confirm window
)

View File

@@ -23,6 +23,7 @@ import androidx.compose.material3.OutlinedButton
import androidx.compose.material3.Scaffold
import androidx.compose.material3.SnackbarHost
import androidx.compose.material3.SnackbarHostState
import androidx.compose.material3.Switch
import androidx.compose.material3.Text
import androidx.compose.material3.TopAppBar
import androidx.compose.runtime.Composable
@@ -80,6 +81,10 @@ fun AccountsScreen(
Text(account.displayName, style = MaterialTheme.typography.titleSmall)
Text(account.serviceId, style = MaterialTheme.typography.bodySmall)
}
Switch(
checked = account.isEnabled,
onCheckedChange = { viewModel.toggleAccountEnabled(account.id, it) },
)
IconButton(onClick = { viewModel.unlinkAccount(account.id) }) {
Icon(Icons.Default.LinkOff, contentDescription = "Unlink")
}

View File

@@ -65,6 +65,16 @@ class AccountsViewModel @Inject constructor(
}
}
fun toggleAccountEnabled(accountId: String, enabled: Boolean) {
viewModelScope.launch {
try {
accountRepository.setAccountEnabled(accountId, enabled)
} catch (e: Exception) {
_linkError.value = e.message ?: "Failed to update account"
}
}
}
fun unlinkAccount(accountId: String) {
viewModelScope.launch {
try {

View File

@@ -9,6 +9,7 @@ import androidx.compose.foundation.layout.fillMaxSize
import androidx.compose.foundation.layout.fillMaxWidth
import androidx.compose.foundation.layout.height
import androidx.compose.foundation.layout.padding
import androidx.compose.foundation.layout.size
import androidx.compose.foundation.layout.width
import androidx.compose.foundation.lazy.LazyColumn
import androidx.compose.foundation.lazy.items
@@ -41,8 +42,8 @@ fun DashboardScreen(
onNavigateToPlan: (String) -> Unit,
viewModel: DashboardViewModel = hiltViewModel(),
) {
val accounts by viewModel.accounts.collectAsStateWithLifecycle()
val plans by viewModel.plans.collectAsStateWithLifecycle()
val backendHealthy by viewModel.backendHealthy.collectAsStateWithLifecycle()
Scaffold(
topBar = {
@@ -63,35 +64,28 @@ fun DashboardScreen(
) {
item {
Spacer(Modifier.height(8.dp))
Text("Linked Accounts", style = MaterialTheme.typography.titleMedium)
Text("Server Status", style = MaterialTheme.typography.titleMedium)
Spacer(Modifier.height(4.dp))
}
if (accounts.isEmpty()) {
item {
Card(
modifier = Modifier.fillMaxWidth(),
colors = CardDefaults.cardColors(
containerColor = MaterialTheme.colorScheme.surfaceVariant,
),
ElevatedCard(modifier = Modifier.fillMaxWidth()) {
Row(
modifier = Modifier.padding(16.dp),
verticalAlignment = Alignment.CenterVertically,
) {
Text(
"No accounts linked yet. Go to Accounts to get started.",
modifier = Modifier.padding(16.dp),
style = MaterialTheme.typography.bodyMedium,
val (color, label) = when (backendHealthy) {
true -> MaterialTheme.colorScheme.primary to "Connected"
false -> MaterialTheme.colorScheme.error to "Unreachable"
null -> MaterialTheme.colorScheme.outline to "Checking..."
}
Icon(
Icons.Default.Circle,
contentDescription = label,
tint = color,
modifier = Modifier.size(12.dp),
)
}
}
} else {
item {
Row(horizontalArrangement = Arrangement.spacedBy(8.dp)) {
accounts.forEach { account ->
ElevatedCard {
Column(modifier = Modifier.padding(12.dp)) {
Text(account.displayName, style = MaterialTheme.typography.labelLarge)
Text(account.serviceId, style = MaterialTheme.typography.bodySmall)
}
}
Spacer(Modifier.width(12.dp))
Column {
Text("Backend", style = MaterialTheme.typography.titleSmall)
Text(label, style = MaterialTheme.typography.bodySmall, color = color)
}
}
}

View File

@@ -2,32 +2,45 @@ package com.omixlab.lckcontrol.ui.dashboard
import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope
import com.omixlab.lckcontrol.data.repository.AccountRepository
import com.omixlab.lckcontrol.data.remote.LckApiService
import com.omixlab.lckcontrol.data.repository.StreamPlanRepository
import com.omixlab.lckcontrol.shared.LinkedAccount
import com.omixlab.lckcontrol.shared.StreamPlan
import dagger.hilt.android.lifecycle.HiltViewModel
import kotlinx.coroutines.delay
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.SharingStarted
import kotlinx.coroutines.flow.StateFlow
import kotlinx.coroutines.flow.asStateFlow
import kotlinx.coroutines.flow.stateIn
import kotlinx.coroutines.launch
import javax.inject.Inject
@HiltViewModel
class DashboardViewModel @Inject constructor(
accountRepository: AccountRepository,
private val streamPlanRepository: StreamPlanRepository,
private val apiService: LckApiService,
) : ViewModel() {
val accounts: StateFlow<List<LinkedAccount>> = accountRepository.observeAccounts()
.stateIn(viewModelScope, SharingStarted.WhileSubscribed(5_000), emptyList())
val plans: StateFlow<List<StreamPlan>> = streamPlanRepository.observePlans()
.stateIn(viewModelScope, SharingStarted.WhileSubscribed(5_000), emptyList())
private val _backendHealthy = MutableStateFlow<Boolean?>(null)
val backendHealthy: StateFlow<Boolean?> = _backendHealthy.asStateFlow()
init {
viewModelScope.launch {
try { streamPlanRepository.syncPlans() } catch (_: Exception) {}
}
viewModelScope.launch {
while (true) {
_backendHealthy.value = try {
apiService.healthCheck()
true
} catch (_: Exception) {
false
}
delay(5_000)
}
}
}
}

View File

@@ -1,15 +1,11 @@
package com.omixlab.lckcontrol.ui.navigation
import androidx.compose.foundation.layout.Box
import androidx.compose.foundation.layout.padding
import androidx.compose.foundation.layout.size
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.Circle
import androidx.compose.material.icons.filled.Dashboard
import androidx.compose.material.icons.filled.Devices
import androidx.compose.material.icons.filled.Person
import androidx.compose.material3.Icon
import androidx.compose.material3.MaterialTheme
import androidx.compose.material3.NavigationBar
import androidx.compose.material3.NavigationBarItem
import androidx.compose.material3.Scaffold
@@ -17,13 +13,8 @@ import androidx.compose.material3.Text
import androidx.compose.runtime.Composable
import androidx.compose.runtime.LaunchedEffect
import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.remember
import androidx.compose.runtime.setValue
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.vector.ImageVector
import androidx.compose.ui.unit.dp
import androidx.navigation.NavGraph.Companion.findStartDestination
import androidx.navigation.NavType
import androidx.navigation.compose.NavHost
@@ -39,7 +30,6 @@ import com.omixlab.lckcontrol.ui.dashboard.DashboardScreen
import com.omixlab.lckcontrol.ui.login.LoginScreen
import com.omixlab.lckcontrol.ui.plans.CreatePlanScreen
import com.omixlab.lckcontrol.ui.plans.PlanDetailScreen
import kotlinx.coroutines.delay
private data class BottomNavItem(
val screen: Screen,
@@ -62,22 +52,6 @@ fun AppNavigation(tokenStore: TokenStore, apiService: LckApiService) {
val showBottomBar = currentRoute in bottomNavItems.map { it.screen.route }
val startDestination = if (tokenStore.isLoggedIn()) Screen.Dashboard.route else Screen.Login.route
// Backend health state
var backendHealthy by remember { mutableStateOf<Boolean?>(null) }
// Poll backend health every 5 seconds
LaunchedEffect(Unit) {
while (true) {
backendHealthy = try {
apiService.healthCheck()
true
} catch (_: Exception) {
false
}
delay(5_000)
}
}
// Session validation on app open — if we think we're logged in, verify it
LaunchedEffect(Unit) {
if (tokenStore.isLoggedIn()) {
@@ -101,24 +75,7 @@ fun AppNavigation(tokenStore: TokenStore, apiService: LckApiService) {
bottomNavItems.forEach { item ->
NavigationBarItem(
icon = {
if (item.screen == Screen.Dashboard && backendHealthy != null) {
Box {
Icon(item.icon, contentDescription = item.label)
Icon(
Icons.Default.Circle,
contentDescription = if (backendHealthy == true) "Backend healthy" else "Backend unreachable",
tint = if (backendHealthy == true)
MaterialTheme.colorScheme.primary
else
MaterialTheme.colorScheme.error,
modifier = Modifier
.size(8.dp)
.align(Alignment.TopEnd),
)
}
} else {
Icon(item.icon, contentDescription = item.label)
}
Icon(item.icon, contentDescription = item.label)
},
label = { Text(item.label) },
selected = currentRoute == item.screen.route,

View File

@@ -20,6 +20,7 @@ import androidx.compose.material3.ElevatedCard
import androidx.compose.material3.ExperimentalMaterial3Api
import androidx.compose.material3.ExposedDropdownMenuBox
import androidx.compose.material3.ExposedDropdownMenuDefaults
import androidx.compose.material3.FilterChip
import androidx.compose.material3.Icon
import androidx.compose.material3.IconButton
import androidx.compose.material3.MaterialTheme
@@ -51,6 +52,8 @@ fun CreatePlanScreen(
viewModel: CreatePlanViewModel = hiltViewModel(),
) {
val planName by viewModel.planName.collectAsStateWithLifecycle()
val executionMode by viewModel.executionMode.collectAsStateWithLifecycle()
val gameId by viewModel.gameId.collectAsStateWithLifecycle()
val destinations by viewModel.destinations.collectAsStateWithLifecycle()
val linkedAccounts by viewModel.linkedAccounts.collectAsStateWithLifecycle()
val isCreating by viewModel.isCreating.collectAsStateWithLifecycle()
@@ -95,6 +98,46 @@ fun CreatePlanScreen(
)
}
item {
Spacer(Modifier.height(8.dp))
Text("Execution Mode", style = MaterialTheme.typography.titleMedium)
Spacer(Modifier.height(4.dp))
Row(
modifier = Modifier.fillMaxWidth(),
horizontalArrangement = Arrangement.spacedBy(8.dp),
) {
FilterChip(
selected = executionMode == "IN_GAME",
onClick = { viewModel.setExecutionMode("IN_GAME") },
label = { Text("In-Game") },
)
FilterChip(
selected = executionMode == "APP_STREAMING",
onClick = { viewModel.setExecutionMode("APP_STREAMING") },
label = { Text("App Streaming") },
)
}
if (executionMode == "APP_STREAMING") {
Spacer(Modifier.height(4.dp))
Text(
"The app encodes and streams. Stream keys stay secure.",
style = MaterialTheme.typography.bodySmall,
color = MaterialTheme.colorScheme.onSurfaceVariant,
)
}
}
item {
OutlinedTextField(
value = gameId,
onValueChange = viewModel::setGameId,
label = { Text("Game Package ID") },
placeholder = { Text("com.example.game") },
modifier = Modifier.fillMaxWidth(),
singleLine = true,
)
}
item {
Spacer(Modifier.height(8.dp))
Row(

View File

@@ -37,6 +37,12 @@ class CreatePlanViewModel @Inject constructor(
private val _planName = MutableStateFlow("")
val planName: StateFlow<String> = _planName.asStateFlow()
private val _executionMode = MutableStateFlow("IN_GAME")
val executionMode: StateFlow<String> = _executionMode.asStateFlow()
private val _gameId = MutableStateFlow("")
val gameId: StateFlow<String> = _gameId.asStateFlow()
private val _destinations = MutableStateFlow<List<DestinationInput>>(emptyList())
val destinations: StateFlow<List<DestinationInput>> = _destinations.asStateFlow()
@@ -50,6 +56,14 @@ class CreatePlanViewModel @Inject constructor(
_planName.value = name
}
fun setExecutionMode(mode: String) {
_executionMode.value = mode
}
fun setGameId(gameId: String) {
_gameId.value = gameId
}
fun addDestination() {
_destinations.value = _destinations.value + DestinationInput()
}
@@ -100,7 +114,7 @@ class CreatePlanViewModel @Inject constructor(
tags = input.tags.split(",").map { it.trim() }.filter { it.isNotBlank() },
)
}
val plan = streamPlanRepository.createPlan(name, streamDests)
val plan = streamPlanRepository.createPlan(name, streamDests, _executionMode.value, _gameId.value)
onCreated(plan.planId)
} catch (e: Exception) {
_error.value = e.message ?: "Failed to create plan"

View File

@@ -40,6 +40,7 @@ import androidx.compose.ui.unit.dp
import androidx.hilt.navigation.compose.hiltViewModel
import androidx.lifecycle.compose.collectAsStateWithLifecycle
import com.omixlab.lckcontrol.shared.StreamDestination
import com.omixlab.lckcontrol.streaming.StreamingState
@OptIn(ExperimentalMaterial3Api::class)
@Composable
@@ -51,6 +52,8 @@ fun PlanDetailScreen(
val plan by viewModel.plan.collectAsStateWithLifecycle()
val isLoading by viewModel.isLoading.collectAsStateWithLifecycle()
val error by viewModel.error.collectAsStateWithLifecycle()
val streamingState by viewModel.streamingState.collectAsStateWithLifecycle()
val streamingStats by viewModel.streamingStats.collectAsStateWithLifecycle()
val snackbarHostState = remember { SnackbarHostState() }
LaunchedEffect(error) {
@@ -122,6 +125,45 @@ fun PlanDetailScreen(
}
}
// Execution mode
item {
ElevatedCard(modifier = Modifier.fillMaxWidth()) {
Column(modifier = Modifier.padding(16.dp)) {
Text("Execution Mode", style = MaterialTheme.typography.labelMedium)
Spacer(Modifier.height(4.dp))
Text(
when (currentPlan.executionMode) {
"APP_STREAMING" -> "App Streaming"
else -> "In-Game"
},
style = MaterialTheme.typography.bodyMedium,
)
}
}
}
// Game ID
if (currentPlan.gameId.isNotBlank()) {
item {
ElevatedCard(modifier = Modifier.fillMaxWidth()) {
Column(modifier = Modifier.padding(16.dp)) {
Text("Game", style = MaterialTheme.typography.labelMedium)
Spacer(Modifier.height(4.dp))
Text(currentPlan.gameId, style = MaterialTheme.typography.bodyMedium)
}
}
}
}
// Streaming stats (only for APP_STREAMING + LIVE)
if (currentPlan.executionMode == "APP_STREAMING" &&
currentPlan.status == "LIVE" &&
streamingState == StreamingState.LIVE) {
item {
StreamingStatsCard(stats = streamingStats)
}
}
// Action buttons
item {
when (currentPlan.status) {

View File

@@ -5,6 +5,9 @@ import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope
import com.omixlab.lckcontrol.data.repository.StreamPlanRepository
import com.omixlab.lckcontrol.shared.StreamPlan
import com.omixlab.lckcontrol.streaming.StreamingManager
import com.omixlab.lckcontrol.streaming.StreamingState
import com.omixlab.lckcontrol.streaming.StreamingStats
import dagger.hilt.android.lifecycle.HiltViewModel
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.SharingStarted
@@ -18,6 +21,7 @@ import javax.inject.Inject
class PlanDetailViewModel @Inject constructor(
savedStateHandle: SavedStateHandle,
private val streamPlanRepository: StreamPlanRepository,
private val streamingManager: StreamingManager,
) : ViewModel() {
private val planId: String = savedStateHandle["planId"] ?: ""
@@ -32,6 +36,9 @@ class PlanDetailViewModel @Inject constructor(
}
}
val streamingState: StateFlow<StreamingState> = streamingManager.state
val streamingStats: StateFlow<StreamingStats> = streamingManager.stats
private val _isLoading = MutableStateFlow(false)
val isLoading: StateFlow<Boolean> = _isLoading.asStateFlow()

View File

@@ -0,0 +1,52 @@
package com.omixlab.lckcontrol.ui.plans
import androidx.compose.foundation.layout.Arrangement
import androidx.compose.foundation.layout.Column
import androidx.compose.foundation.layout.Row
import androidx.compose.foundation.layout.fillMaxWidth
import androidx.compose.foundation.layout.padding
import androidx.compose.material3.ElevatedCard
import androidx.compose.material3.MaterialTheme
import androidx.compose.material3.Text
import androidx.compose.runtime.Composable
import androidx.compose.ui.Modifier
import androidx.compose.ui.unit.dp
import com.omixlab.lckcontrol.streaming.StreamingStats
import java.util.Locale
/**
 * Card displaying live streaming statistics: video bitrate, audio bitrate,
 * frames per second, and dropped-frame count.
 *
 * Rendered on the plan detail screen while an APP_STREAMING plan is LIVE;
 * [stats] presumably comes from StreamingManager.stats — confirm against caller.
 */
@Composable
fun StreamingStatsCard(stats: StreamingStats) {
ElevatedCard(modifier = Modifier.fillMaxWidth()) {
Column(
modifier = Modifier.padding(16.dp),
verticalArrangement = Arrangement.spacedBy(8.dp),
) {
Text("Streaming Stats", style = MaterialTheme.typography.titleSmall)
// Single row spreading the four stat items edge-to-edge.
Row(
modifier = Modifier.fillMaxWidth(),
horizontalArrangement = Arrangement.SpaceBetween,
) {
StatItem("Video", formatBitrate(stats.videoBitrate))
StatItem("Audio", formatBitrate(stats.audioBitrate))
StatItem("FPS", "${stats.fps}")
StatItem("Dropped", "${stats.droppedFrames}")
}
}
}
}
/** Small label-over-value column used for each entry in the stats row. */
@Composable
private fun StatItem(label: String, value: String) {
Column {
Text(label, style = MaterialTheme.typography.labelSmall)
Text(value, style = MaterialTheme.typography.bodyMedium)
}
}
/**
 * Formats a bitrate given in bits per second as a short human-readable
 * string: "2.5 Mbps", "64 kbps", or "500 bps".
 *
 * Uses [Locale.US] explicitly so the decimal separator is always '.'.
 * `String.format` without a locale follows the device locale and would
 * emit e.g. "2,5 Mbps" in comma-decimal regions (Android `DefaultLocale`
 * lint warning).
 */
private fun formatBitrate(bps: Long): String {
return when {
bps >= 1_000_000 -> "%.1f Mbps".format(Locale.US, bps / 1_000_000.0)
bps >= 1_000 -> "%.0f kbps".format(Locale.US, bps / 1_000.0)
else -> "$bps bps"
}
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.