增加Android端接收demo

This commit is contained in:
2025-12-18 23:50:53 +08:00
parent 6905562732
commit 36841d0b48
13 changed files with 631 additions and 0 deletions

View File

@@ -0,0 +1,49 @@
// Module-level Gradle build for the receiver demo app (Kotlin + native C++ via CMake).
plugins {
    id 'com.android.application'
    id 'kotlin-android'
}

android {
    compileSdk 31

    defaultConfig {
        applicationId "com.displayflow.receiver"
        minSdk 24
        targetSdk 31
        versionCode 1
        versionName "1.0"

        // Flags for compiling the native receiver library.
        externalNativeBuild {
            cmake {
                cppFlags "-std=c++17"
            }
        }
    }

    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }

    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }

    kotlinOptions {
        jvmTarget = '1.8'
    }

    // Native build entry point. Note: this pins CMake 3.18.1, while
    // CMakeLists.txt itself only requires 3.10.2 (compatible: required <= used).
    externalNativeBuild {
        cmake {
            path file('src/main/cpp/CMakeLists.txt')
            version '3.18.1'
        }
    }
}

dependencies {
    implementation 'androidx.core:core-ktx:1.7.0'
    implementation 'androidx.appcompat:appcompat:1.4.1'
    implementation 'com.google.android.material:material:1.5.0'
    implementation 'androidx.constraintlayout:constraintlayout:2.1.3'
}

View File

@@ -0,0 +1,28 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Manifest for the DisplayFlow receiver demo. -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.displayflow.receiver">

    <!-- Network access for the UDP video stream; WAKE_LOCK keeps the device
         awake while receiving. -->
    <uses-permission android:name="android.permission.INTERNET" />
    <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
    <uses-permission android:name="android.permission.WAKE_LOCK" />

    <application
        android:allowBackup="true"
        android:icon="@mipmap/ic_launcher"
        android:label="DisplayFlow Receiver"
        android:roundIcon="@mipmap/ic_launcher_round"
        android:supportsRtl="true"
        android:theme="@style/Theme.AppCompat.NoActionBar">
        <!-- Single full-screen landscape activity; configChanges prevents an
             activity restart (and surface teardown) on rotation/keyboard changes. -->
        <activity
            android:name=".MainActivity"
            android:exported="true"
            android:screenOrientation="landscape"
            android:configChanges="orientation|screenSize|keyboardHidden">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>
</manifest>

View File

@@ -0,0 +1,36 @@
# CMake build for the native receiver library (libreceiver-lib.so),
# loaded from Kotlin via System.loadLibrary("receiver-lib").
cmake_minimum_required(VERSION 3.10.2)

project("receiver-lib")

# Shared library containing the JNI entry points plus the
# receive / reassemble / decode pipeline.
add_library( receiver-lib
             SHARED
             native-lib.cpp
             UdpReceiver.cpp
             VideoDecoder.cpp
             ReceiverEngine.cpp
           )

# NDK libraries: __android_log_print, ANativeWindow, and the
# MediaCodec NDK (AMediaCodec_*) respectively.
find_library( log-lib
              log )
find_library( android-lib android )
find_library( mediandk-lib mediandk )

target_link_libraries( receiver-lib
                       ${log-lib}
                       ${android-lib}
                       ${mediandk-lib}
                     )

View File

@@ -0,0 +1,47 @@
#include "ReceiverEngine.h"
#include <android/log.h>
#define LOG_TAG "ReceiverEngine"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
// Acquires the native window backing the Java Surface and wires the UDP
// receiver so that every reassembled frame is forwarded to OnFrameReceived.
// ANativeWindow_fromSurface adds a reference that is paired with
// ANativeWindow_release in the destructor.
ReceiverEngine::ReceiverEngine(JNIEnv* env, jobject surface) {
    window_ = ANativeWindow_fromSurface(env, surface);
    // Bind callback: the lambda captures `this`, so this object must outlive
    // the receiver thread (guaranteed because ~ReceiverEngine calls Stop()).
    receiver_.SetCallback([this](const std::vector<uint8_t>& data, const FrameHeader& header) {
        this->OnFrameReceived(data, header);
    });
}
// Tears down in dependency order: stop the receiver thread (so no more
// frame callbacks can fire), release the decoder, then drop the window ref.
ReceiverEngine::~ReceiverEngine() {
    Stop();
    if (window_) {
        ANativeWindow_release(window_);
        window_ = nullptr;
    }
}
// Starts the UDP receiver listening on `port`. Decoder initialization is
// deferred until the first frame arrives (see OnFrameReceived).
void ReceiverEngine::Start(int port) {
    receiver_.Start(port);
}
// Stops the receiver thread first so no callback can touch the decoder
// while it is being released.
void ReceiverEngine::Stop() {
    receiver_.Stop();
    decoder_.Release();
}
// Receiver-thread callback: lazily initializes the decoder from the first
// frame's metadata, then feeds every complete frame into it.
void ReceiverEngine::OnFrameReceived(const std::vector<uint8_t>& data, const FrameHeader& header) {
    if (!decoder_initialized_) {
        // Init decoder on first frame: the Windows sender transmits the video
        // width/height in every FrameHeader, so the first valid one suffices.
        if (header.width > 0 && header.height > 0) {
            if (decoder_.Initialize(window_, header.width, header.height)) {
                decoder_initialized_ = true;
            }
        }
    }
    // Frames that arrive before a successful init are dropped silently.
    if (decoder_initialized_) {
        decoder_.Decode(data.data(), data.size(), header.timestamp);
    }
}

View File

@@ -0,0 +1,24 @@
#pragma once
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <jni.h>
#include "UdpReceiver.h"
#include "VideoDecoder.h"
// Glue object owned by the Kotlin MainActivity (via a jlong handle): owns the
// UDP receiver and the MediaCodec-based decoder, and renders decoded frames
// onto the ANativeWindow obtained from the Java Surface.
class ReceiverEngine {
public:
    // Acquires the native window from `surface` and wires the receiver
    // callback. Does not start receiving; call Start().
    ReceiverEngine(JNIEnv* env, jobject surface);
    // Stops receiving/decoding and releases the native window reference.
    ~ReceiverEngine();
    // Begins listening for the stream on the given UDP port.
    void Start(int port);
    // Stops the receiver thread and releases the decoder.
    void Stop();
private:
    // Invoked on the receiver thread for every fully reassembled frame.
    void OnFrameReceived(const std::vector<uint8_t>& data, const FrameHeader& header);
    ANativeWindow* window_ = nullptr;   // ref-counted; released in dtor
    UdpReceiver receiver_;
    VideoDecoder decoder_;
    bool decoder_initialized_ = false;  // set once decoder_.Initialize() succeeds
};

View File

@@ -0,0 +1,153 @@
#include "UdpReceiver.h"
#include <sys/socket.h>
#include <netinet/in.h>
#include <unistd.h>
#include <android/log.h>
#include <cstring>
#define LOG_TAG "UdpReceiver"
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
// Nothing to do at construction; the socket is created lazily in Start().
UdpReceiver::UdpReceiver() {}
// Ensures the receive thread is joined and the socket closed on destruction.
UdpReceiver::~UdpReceiver() {
    Stop();
}
// Creates a UDP socket bound to `port` on all interfaces (INADDR_ANY) and
// spawns the receive thread. Returns true on success (or if already running).
// On any failure the socket is released and the object is left in a clean,
// restartable state.
bool UdpReceiver::Start(int port) {
    if (running_) return true;
    sockfd_ = socket(AF_INET, SOCK_DGRAM, 0);
    if (sockfd_ < 0) {
        LOGE("Failed to create socket");
        return false;
    }
    struct sockaddr_in addr;
    memset(&addr, 0, sizeof(addr));
    addr.sin_family = AF_INET;
    addr.sin_port = htons(port);
    addr.sin_addr.s_addr = INADDR_ANY;
    if (bind(sockfd_, (struct sockaddr*)&addr, sizeof(addr)) < 0) {
        LOGE("Failed to bind socket");
        close(sockfd_);
        // Bug fix: reset the fd so a later Stop() (or destructor) does not
        // close a stale — and possibly reused — descriptor a second time.
        sockfd_ = -1;
        return false;
    }
    running_ = true;
    worker_thread_ = std::thread(&UdpReceiver::ReceiveLoop, this);
    return true;
}
// Stops the receive thread and releases the socket. Safe to call repeatedly.
// The socket is deliberately closed BEFORE joining: closing it makes the
// blocking recvfrom() in ReceiveLoop() return, letting the thread observe
// running_ == false and exit.
void UdpReceiver::Stop() {
    running_ = false;
    if (sockfd_ >= 0) {
        close(sockfd_);
        sockfd_ = -1;
    }
    if (worker_thread_.joinable()) {
        worker_thread_.join();
    }
}
// Registers the callback invoked (on the receive thread) for every fully
// reassembled frame. Set this before Start(); changing it while the receive
// thread is running is not synchronized.
void UdpReceiver::SetCallback(OnFrameReceivedCallback callback) {
    callback_ = std::move(callback);  // avoid an extra copy of the std::function
}
// Receive-thread body: reads UDP datagrams, reassembles fragmented frames,
// and hands each complete frame to callback_.
//
// Wire format (matching the Windows sender):
//   Packet header (8 bytes): uint32 frameId | uint16 fragId | uint16 totalFrags
//   Fragment 0 additionally begins with the 24-byte FrameHeader
//   (uint64 timestamp | uint32 width | uint32 height | uint32 type | uint32 size),
//   so its H.264 payload starts at offset 8 + 24.
// NOTE(review): multi-byte fields are read with host byte order — assumes
// sender and receiver are both little-endian; confirm against the sender.
void UdpReceiver::ReceiveLoop() {
    static constexpr size_t kPacketHeaderSize = 8;
    static constexpr size_t kFrameHeaderSize = 24;
    std::vector<uint8_t> buffer(65535);
    LOGI("Receiver thread started");
    while (running_) {
        ssize_t received = recvfrom(sockfd_, buffer.data(), buffer.size(), 0, nullptr, nullptr);
        if (received <= 0) {
            if (running_) LOGE("Recv failed or socket closed");
            continue;
        }
        if (received < (ssize_t)kPacketHeaderSize) continue;  // too short for a packet header
        // Parse the packet header with memcpy: avoids the unaligned-read /
        // strict-aliasing UB of reinterpret_cast on a raw byte buffer.
        uint32_t frameId;
        uint16_t fragId;
        uint16_t totalFrags;
        memcpy(&frameId, &buffer[0], sizeof(frameId));
        memcpy(&fragId, &buffer[4], sizeof(fragId));
        memcpy(&totalFrags, &buffer[6], sizeof(totalFrags));
        size_t payloadSize = received - kPacketHeaderSize;
        uint8_t* payload = &buffer[kPacketHeaderSize];
        std::lock_guard<std::mutex> lock(frames_mutex_);
        PendingFrame& frame = pending_frames_[frameId];
        // First fragment observed for this frame id: initialize bookkeeping.
        if (frame.fragments.empty()) {
            frame.frameId = frameId;
            frame.totalFrags = totalFrags;
            frame.receivedFrags = 0;
            frame.startTime = std::chrono::steady_clock::now();
        }
        // Store the fragment, ignoring duplicates (retransmits/dup datagrams).
        if (frame.fragments.find(fragId) == frame.fragments.end()) {
            frame.fragments[fragId] = std::vector<uint8_t>(payload, payload + payloadSize);
            frame.receivedFrags++;
            // Fragment 0 carries the FrameHeader in front of the video data;
            // field-wise memcpy keeps this safe regardless of struct padding.
            if (fragId == 0 && payloadSize >= kFrameHeaderSize) {
                memcpy(&frame.header.timestamp, payload, 8);
                memcpy(&frame.header.width, payload + 8, 4);
                memcpy(&frame.header.height, payload + 12, 4);
                memcpy(&frame.header.type, payload + 16, 4);
                memcpy(&frame.header.size, payload + 20, 4);
            }
        }
        // All fragments present -> reassemble in fragment order and deliver.
        // (Completion implies fragment 0 arrived, so frame.header is parsed.)
        if (frame.receivedFrags == frame.totalFrags) {
            std::vector<uint8_t> fullFrame;
            fullFrame.reserve(frame.header.size);
            for (uint16_t i = 0; i < frame.totalFrags; ++i) {
                auto it = frame.fragments.find(i);
                if (it == frame.fragments.end()) {
                    // Unreachable if receivedFrags counting is correct.
                    LOGE("Logic error: Missing fragment %d", i);
                    continue;
                }
                const auto& fragData = it->second;
                if (i == 0) {
                    // Skip the 24-byte FrameHeader prefix of fragment 0.
                    if (fragData.size() > kFrameHeaderSize) {
                        fullFrame.insert(fullFrame.end(), fragData.begin() + kFrameHeaderSize, fragData.end());
                    }
                } else {
                    fullFrame.insert(fullFrame.end(), fragData.begin(), fragData.end());
                }
            }
            // NOTE(review): the callback runs with frames_mutex_ held; fine
            // with this single receive thread, revisit if more threads appear.
            if (callback_) {
                callback_(fullFrame, frame.header);
            }
            pending_frames_.erase(frameId);
        }
        // Crude back-pressure: cap the number of half-received frames by
        // evicting the lowest (oldest) frame id.
        if (pending_frames_.size() > 30) {
            pending_frames_.erase(pending_frames_.begin());
        }
    }
}

View File

@@ -0,0 +1,55 @@
#pragma once
#include <atomic>
#include <chrono>
#include <cstdint>
#include <functional>
#include <map>
#include <mutex>
#include <thread>
#include <vector>
// Per-frame metadata sent by the Windows sender at the start of fragment 0.
// Field order and widths must match the sender's 24-byte wire layout exactly.
struct FrameHeader {
    uint64_t timestamp;  // sender-side capture/encode timestamp
    uint32_t width;      // video width in pixels
    uint32_t height;     // video height in pixels
    uint32_t type;       // 0: Unknown, 1: I-Frame, 2: P-Frame
    uint32_t size;       // total encoded frame size in bytes
};
// Callback for full frames
using OnFrameReceivedCallback = std::function<void(const std::vector<uint8_t>& frameData, const FrameHeader& header)>;
// Listens on a UDP port, reassembles fragmented video frames, and delivers
// each complete frame via a user-supplied callback on its own receive thread.
class UdpReceiver {
public:
    UdpReceiver();
    ~UdpReceiver();
    // Binds a UDP socket on `port` and starts the receive thread.
    // Returns false if the socket cannot be created or bound.
    bool Start(int port);
    // Stops the receive thread and closes the socket. Safe to call twice.
    void Stop();
    // Registers the callback invoked (on the receive thread) for every
    // fully reassembled frame. Set this before calling Start().
    void SetCallback(OnFrameReceivedCallback callback);
private:
    void ReceiveLoop();
    int sockfd_ = -1;
    std::atomic<bool> running_{false};
    std::thread worker_thread_;
    OnFrameReceivedCallback callback_;
    // Reassembly state for one partially received frame.
    // (Dead-code removal: the old `Fragment` helper struct was never used —
    // fragments are stored directly as byte vectors below.)
    struct PendingFrame {
        uint32_t frameId;
        uint16_t totalFrags;
        uint16_t receivedFrags;
        // fragId -> fragment payload (fragment 0 includes the FrameHeader).
        std::map<uint16_t, std::vector<uint8_t>> fragments;
        FrameHeader header;
        std::chrono::steady_clock::time_point startTime;
    };
    std::map<uint32_t, PendingFrame> pending_frames_;
    std::mutex frames_mutex_;  // guards pending_frames_
};

View File

@@ -0,0 +1,93 @@
#include "VideoDecoder.h"
#include <android/log.h>
#include <cstring>
#define LOG_TAG "VideoDecoder"
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
// Nothing to do at construction; the codec is created in Initialize().
VideoDecoder::VideoDecoder() {}
// Ensures the codec is stopped and freed when the decoder is destroyed.
VideoDecoder::~VideoDecoder() {
    Release();
}
// Creates and starts a hardware H.264 ("video/avc") decoder rendering
// directly onto `window`. Returns false on any failure; in that case the
// codec is fully released and the decoder can be re-initialized later.
bool VideoDecoder::Initialize(ANativeWindow* window, int width, int height) {
    if (codec_) {
        Release();  // re-initialization: tear down the previous codec first
    }
    width_ = width;
    height_ = height;
    codec_ = AMediaCodec_createDecoderByType("video/avc");
    if (!codec_) {
        LOGE("Failed to create decoder for video/avc");
        return false;
    }
    AMediaFormat* format = AMediaFormat_new();
    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, "video/avc");
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, width);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HEIGHT, height);
    // Low latency config: best-effort hints, ignored by codecs that do not
    // understand these vendor-neutral keys.
    AMediaFormat_setInt32(format, "low-latency", 1);
    AMediaFormat_setInt32(format, "priority", 0);
    media_status_t status = AMediaCodec_configure(codec_, format, window, nullptr, 0);
    AMediaFormat_delete(format);
    if (status != AMEDIA_OK) {
        LOGE("Failed to configure codec: %d", status);
        // Bug fix: free the codec instead of leaking it and leaving codec_
        // pointing at a half-configured instance.
        AMediaCodec_delete(codec_);
        codec_ = nullptr;
        return false;
    }
    status = AMediaCodec_start(codec_);
    if (status != AMEDIA_OK) {
        LOGE("Failed to start codec: %d", status);
        AMediaCodec_delete(codec_);
        codec_ = nullptr;
        return false;
    }
    is_configured_ = true;
    LOGI("Decoder initialized: %dx%d", width, height);
    return true;
}
void VideoDecoder::Decode(const uint8_t* data, size_t size, uint64_t timestamp) {
if (!is_configured_ || !codec_) return;
// Get Input Buffer
ssize_t bufIdx = AMediaCodec_dequeueInputBuffer(codec_, 2000);
if (bufIdx >= 0) {
size_t bufSize;
uint8_t* buf = AMediaCodec_getInputBuffer(codec_, bufIdx, &bufSize);
if (buf && size <= bufSize) {
memcpy(buf, data, size);
AMediaCodec_queueInputBuffer(codec_, bufIdx, 0, size, timestamp, 0);
}
}
// Handle Output (Rendering)
AMediaCodecBufferInfo info;
ssize_t outBufIdx = AMediaCodec_dequeueOutputBuffer(codec_, &info, 0);
while (outBufIdx >= 0) {
AMediaCodec_releaseOutputBuffer(codec_, outBufIdx, true); // true = render to surface
outBufIdx = AMediaCodec_dequeueOutputBuffer(codec_, &info, 0);
}
if (outBufIdx == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
auto format = AMediaCodec_getOutputFormat(codec_);
LOGI("Output format changed: %s", AMediaFormat_toString(format));
AMediaFormat_delete(format);
}
}
// Stops and destroys the codec if present; safe to call repeatedly and from
// the destructor. Always clears is_configured_ so Decode() becomes a no-op.
void VideoDecoder::Release() {
    if (codec_) {
        AMediaCodec_stop(codec_);
        AMediaCodec_delete(codec_);
        codec_ = nullptr;
    }
    is_configured_ = false;
}

View File

@@ -0,0 +1,23 @@
#pragma once
#include <media/NdkMediaCodec.h>
#include <media/NdkMediaFormat.h>
#include <android/native_window.h>
#include <cstddef>
#include <cstdint>
#include <string>
#include <vector>
// Thin wrapper around the NDK AMediaCodec H.264 decoder that renders decoded
// frames directly onto an ANativeWindow (no output-buffer copies).
class VideoDecoder {
public:
    VideoDecoder();
    ~VideoDecoder();
    // Creates, configures, and starts the decoder for the given surface and
    // resolution. Returns false on failure; may be called again to re-init.
    bool Initialize(ANativeWindow* window, int width, int height);
    // Feeds one encoded access unit and renders any decoded output.
    // No-op until Initialize() has succeeded.
    void Decode(const uint8_t* data, size_t size, uint64_t timestamp);
    // Stops and frees the codec; safe to call multiple times.
    void Release();
private:
    AMediaCodec* codec_ = nullptr;
    bool is_configured_ = false;  // true only between a successful Initialize() and Release()
    int width_ = 0;               // last configured width
    int height_ = 0;              // last configured height
};

View File

@@ -0,0 +1,46 @@
#include <jni.h>
#include <string>
#include "ReceiverEngine.h"
extern "C" JNIEXPORT jlong JNICALL
Java_com_displayflow_receiver_MainActivity_nativeInit(
JNIEnv* env,
jobject /* this */,
jobject surface) {
auto engine = new ReceiverEngine(env, surface);
return reinterpret_cast<jlong>(engine);
}
extern "C" JNIEXPORT void JNICALL
Java_com_displayflow_receiver_MainActivity_nativeStart(
JNIEnv* env,
jobject /* this */,
jlong enginePtr,
jint port) {
auto engine = reinterpret_cast<ReceiverEngine*>(enginePtr);
if (engine) {
engine->Start(port);
}
}
extern "C" JNIEXPORT void JNICALL
Java_com_displayflow_receiver_MainActivity_nativeStop(
JNIEnv* env,
jobject /* this */,
jlong enginePtr) {
auto engine = reinterpret_cast<ReceiverEngine*>(enginePtr);
if (engine) {
engine->Stop();
}
}
extern "C" JNIEXPORT void JNICALL
Java_com_displayflow_receiver_MainActivity_nativeRelease(
JNIEnv* env,
jobject /* this */,
jlong enginePtr) {
auto engine = reinterpret_cast<ReceiverEngine*>(enginePtr);
if (engine) {
delete engine;
}
}

View File

@@ -0,0 +1,60 @@
package com.displayflow.receiver
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.view.Surface
import android.view.SurfaceHolder
import android.view.SurfaceView
import android.view.WindowManager
import android.widget.FrameLayout
/**
 * Full-screen receiver activity: hosts a [SurfaceView] whose surface is handed
 * to the native engine, which listens for the video stream on UDP port 8888
 * and decodes straight onto the surface.
 */
class MainActivity : AppCompatActivity(), SurfaceHolder.Callback {

    private lateinit var surfaceView: SurfaceView

    // Opaque pointer to the native ReceiverEngine; 0 means "not created".
    private var nativeEngine: Long = 0

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // Prevent the display from sleeping while the stream is shown.
        window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)
        // Build the UI in code: a single SurfaceView inside a FrameLayout.
        surfaceView = SurfaceView(this)
        setContentView(FrameLayout(this).apply { addView(surfaceView) })
        surfaceView.holder.addCallback(this)
    }

    override fun surfaceCreated(holder: SurfaceHolder) {
        // Surface is ready: create the native engine and start listening.
        nativeEngine = nativeInit(holder.surface)
        nativeStart(nativeEngine, 8888) // Listen on port 8888
    }

    override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) {
        // Handle resize if needed
    }

    override fun surfaceDestroyed(holder: SurfaceHolder) {
        // Tear down the native engine before the surface goes away.
        if (nativeEngine != 0L) {
            nativeStop(nativeEngine)
            nativeRelease(nativeEngine)
            nativeEngine = 0
        }
    }

    // JNI bridge into libreceiver-lib.so.
    external fun nativeInit(surface: Surface): Long
    external fun nativeStart(enginePtr: Long, port: Int)
    external fun nativeStop(enginePtr: Long)
    external fun nativeRelease(enginePtr: Long)

    companion object {
        init {
            System.loadLibrary("receiver-lib")
        }
    }
}

View File

@@ -0,0 +1,15 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
    repositories {
        google()
        mavenCentral()
    }
    dependencies {
        // AGP 7.0.4 with Kotlin 1.6.10; keep these two versions in sync.
        classpath "com.android.tools.build:gradle:7.0.4"
        classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:1.6.10"
    }
}

// Removes the root build directory on `gradle clean`.
task clean(type: Delete) {
    delete rootProject.buildDir
}

View File

@@ -0,0 +1,2 @@
// Gradle settings: single-module project containing only the :app module.
include ':app'
rootProject.name = "AndroidReceiverDemo"