增加扩展屏的框架
This commit is contained in:
@@ -17,6 +17,8 @@ set(SOURCES
|
||||
VideoEncoder.h
|
||||
NetworkSender.cpp
|
||||
NetworkSender.h
|
||||
IddBridge.cpp
|
||||
IddBridge.h
|
||||
)
|
||||
|
||||
add_executable(WindowsSenderDemo ${SOURCES})
|
||||
@@ -38,16 +40,22 @@ find_library(SWSCALE_LIBRARY swscale PATHS ${FFMPEG_ROOT}/lib)
|
||||
if (AVCODEC_INCLUDE_DIR AND AVCODEC_LIBRARY)
    include_directories(${AVCODEC_INCLUDE_DIR})
    message(STATUS "Found FFmpeg: ${AVCODEC_INCLUDE_DIR}")
    set(USE_FFMPEG 1)
else()
    # FFmpeg is optional: fall back to the NO_FFMPEG stub encoder instead of
    # aborting the configure step with FATAL_ERROR.
    message(WARNING "FFmpeg not found. Building with NO_FFMPEG stub encoder.")
    add_definitions(-DNO_FFMPEG)
endif()

# System libraries are always required.
target_link_libraries(WindowsSenderDemo
    d3d11
    dxgi
    ws2_32
)

# FFmpeg libraries are linked only when they were actually found; linking
# ${AVCODEC_LIBRARY} etc. unconditionally would break the NO_FFMPEG stub
# build with "-NOTFOUND" link errors.
if (USE_FFMPEG)
    target_link_libraries(WindowsSenderDemo
        ${AVCODEC_LIBRARY}
        ${AVFORMAT_LIBRARY}
        ${AVUTIL_LIBRARY}
        ${SWSCALE_LIBRARY}
    )
endif()
|
||||
|
||||
243
demo/windows_sender/IddBridge.cpp
Normal file
243
demo/windows_sender/IddBridge.cpp
Normal file
@@ -0,0 +1,243 @@
|
||||
#include "IddBridge.h"

#include <chrono>
#include <cstring>
#include <string>
|
||||
|
||||
// All members are initialized by in-class default initializers (see IddBridge.h).
IddBridge::IddBridge() = default;
|
||||
|
||||
// Releases, in dependency order, everything Initialize() acquired:
// any outstanding frame first, then the shared view, and finally the
// mapping handle and both synchronization events.
IddBridge::~IddBridge() {
    if (frame_acquired_) {
        ReleaseFrame();
    }

    if (shared_ != nullptr) {
        UnmapViewOfFile(shared_);
        shared_ = nullptr;
    }

    auto closeHandle = [](HANDLE& h) {
        if (h != nullptr) {
            CloseHandle(h);
            h = nullptr;
        }
    };
    closeHandle(hMap_);
    closeHandle(hReadyEvent_);
    closeHandle(hConsumedEvent_);
}
|
||||
|
||||
bool IddBridge::Initialize() {
|
||||
D3D_FEATURE_LEVEL featureLevels[] = {
|
||||
D3D_FEATURE_LEVEL_11_1,
|
||||
D3D_FEATURE_LEVEL_11_0,
|
||||
};
|
||||
D3D_FEATURE_LEVEL featureLevel;
|
||||
HRESULT hr = D3D11CreateDevice(
|
||||
nullptr,
|
||||
D3D_DRIVER_TYPE_HARDWARE,
|
||||
nullptr,
|
||||
D3D11_CREATE_DEVICE_BGRA_SUPPORT,
|
||||
featureLevels,
|
||||
ARRAYSIZE(featureLevels),
|
||||
D3D11_SDK_VERSION,
|
||||
&device_,
|
||||
&featureLevel,
|
||||
&context_
|
||||
);
|
||||
if (FAILED(hr)) {
|
||||
hr = D3D11CreateDevice(
|
||||
nullptr,
|
||||
D3D_DRIVER_TYPE_WARP,
|
||||
nullptr,
|
||||
D3D11_CREATE_DEVICE_BGRA_SUPPORT,
|
||||
featureLevels,
|
||||
ARRAYSIZE(featureLevels),
|
||||
D3D11_SDK_VERSION,
|
||||
&device_,
|
||||
&featureLevel,
|
||||
&context_
|
||||
);
|
||||
if (FAILED(hr)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
std::wstring mapName = L"Global\\DisplayFlowIddFrame";
|
||||
std::wstring readyName = L"Global\\DisplayFlowIddReady";
|
||||
std::wstring consumedName = L"Global\\DisplayFlowIddConsumed";
|
||||
|
||||
hMap_ = OpenFileMappingW(FILE_MAP_READ, FALSE, mapName.c_str());
|
||||
if (!hMap_) {
|
||||
// Fallback to Local (Simulation mode)
|
||||
mapName = L"Local\\DisplayFlowIddFrame";
|
||||
readyName = L"Local\\DisplayFlowIddReady";
|
||||
consumedName = L"Local\\DisplayFlowIddConsumed";
|
||||
hMap_ = OpenFileMappingW(FILE_MAP_READ, FALSE, mapName.c_str());
|
||||
}
|
||||
|
||||
if (!hMap_) {
|
||||
// Create for testing; in production UMDF service should create
|
||||
shared_size_ = 64 * 1024 * 1024; // 64MB
|
||||
hMap_ = CreateFileMappingW(INVALID_HANDLE_VALUE, nullptr, PAGE_READWRITE, 0, (DWORD)shared_size_, mapName.c_str());
|
||||
if (!hMap_) {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
// Unknown size; assume max
|
||||
shared_size_ = 64 * 1024 * 1024;
|
||||
}
|
||||
|
||||
shared_ = (uint8_t*)MapViewOfFile(hMap_, FILE_MAP_READ, 0, 0, shared_size_);
|
||||
if (!shared_) {
|
||||
return false;
|
||||
}
|
||||
|
||||
hReadyEvent_ = OpenEventW(SYNCHRONIZE, FALSE, readyName.c_str());
|
||||
if (!hReadyEvent_) {
|
||||
hReadyEvent_ = CreateEventW(nullptr, FALSE, FALSE, readyName.c_str());
|
||||
}
|
||||
hConsumedEvent_ = OpenEventW(EVENT_MODIFY_STATE, FALSE, consumedName.c_str());
|
||||
if (!hConsumedEvent_) {
|
||||
hConsumedEvent_ = CreateEventW(nullptr, FALSE, FALSE, consumedName.c_str());
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool IddBridge::EnsureTexture(int width, int height) {
|
||||
if (gpuTexture_ && tex_width_ == width && tex_height_ == height) return true;
|
||||
gpuTexture_.Reset();
|
||||
|
||||
D3D11_TEXTURE2D_DESC desc = {};
|
||||
desc.Width = width;
|
||||
desc.Height = height;
|
||||
desc.MipLevels = 1;
|
||||
desc.ArraySize = 1;
|
||||
desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
|
||||
desc.SampleDesc.Count = 1;
|
||||
desc.Usage = D3D11_USAGE_DEFAULT;
|
||||
desc.BindFlags = 0;
|
||||
|
||||
HRESULT hr = device_->CreateTexture2D(&desc, nullptr, &gpuTexture_);
|
||||
if (FAILED(hr)) return false;
|
||||
tex_width_ = width;
|
||||
tex_height_ = height;
|
||||
return true;
|
||||
}
|
||||
|
||||
// Waits (up to 100ms) for the producer's ready event, validates the shared
// header, uploads the BGRA payload into gpuTexture_ and hands that texture
// to the caller. The texture stays valid until the next CaptureFrame().
// @return false on timeout, unsupported format, or a malformed header.
bool IddBridge::CaptureFrame(ComPtr<ID3D11Texture2D>& texture) {
    if (!shared_ || !hReadyEvent_) return false;

    DWORD wait = WaitForSingleObject(hReadyEvent_, 100);
    if (wait != WAIT_OBJECT_0) {
        return false;
    }

    auto* hdr = reinterpret_cast<const IddSharedHeader*>(shared_);
    if (hdr->format != 0) {
        return false; // only BGRA32 (format 0) is supported
    }
    int w = (int)hdr->width;
    int h = (int)hdr->height;
    int stride = (int)hdr->stride;

    // Reject nonsense headers and frames that would read past the mapping;
    // the header is written by another process and must not be trusted.
    if (w <= 0 || h <= 0 || stride < w * 4) {
        return false;
    }
    uint64_t needed = sizeof(IddSharedHeader) + (uint64_t)stride * (uint64_t)h;
    if (needed > (uint64_t)shared_size_) {
        return false;
    }
    const uint8_t* pixels = shared_ + sizeof(IddSharedHeader);

    if (!EnsureTexture(w, h)) return false;

    // Upload the whole subresource; UpdateSubresource honors the source row
    // pitch, so padded strides are handled. (The previous D3D11_BOX was
    // computed but never used -- removed as dead code.)
    context_->UpdateSubresource(gpuTexture_.Get(), 0, nullptr, pixels, stride, 0);

    frame_acquired_ = true;
    texture = gpuTexture_;

    // Tell the producer it may overwrite the buffer.
    if (hConsumedEvent_) {
        SetEvent(hConsumedEvent_);
    }
    return true;
}
|
||||
|
||||
// Marks the current frame as released. The shared-memory buffer itself is
// owned by the producer, so there is nothing to unmap here -- consumption is
// already signalled back via hConsumedEvent_ in CaptureFrame().
void IddBridge::ReleaseFrame() {
    frame_acquired_ = false;
}
|
||||
|
||||
// ============================================================================
|
||||
// IddProducer Implementation
|
||||
// ============================================================================
|
||||
|
||||
// All members are initialized by in-class default initializers (see IddBridge.h).
IddProducer::IddProducer() = default;
|
||||
|
||||
// Tears down the producer side: unmap the shared view first, then close
// every kernel handle that Initialize() opened or created.
IddProducer::~IddProducer() {
    if (shared_ != nullptr) {
        UnmapViewOfFile(shared_);
    }
    for (HANDLE h : { hMap_, hReadyEvent_, hConsumedEvent_ }) {
        if (h != nullptr) {
            CloseHandle(h);
        }
    }
}
|
||||
|
||||
bool IddProducer::Initialize() {
|
||||
std::wstring mapName = L"Local\\DisplayFlowIddFrame";
|
||||
std::wstring readyName = L"Local\\DisplayFlowIddReady";
|
||||
std::wstring consumedName = L"Local\\DisplayFlowIddConsumed";
|
||||
|
||||
shared_size_ = 64 * 1024 * 1024; // 64MB
|
||||
|
||||
// Producer prefers Creating
|
||||
hMap_ = CreateFileMappingW(INVALID_HANDLE_VALUE, nullptr, PAGE_READWRITE, 0, (DWORD)shared_size_, mapName.c_str());
|
||||
if (!hMap_) {
|
||||
// Maybe already exists
|
||||
hMap_ = OpenFileMappingW(FILE_MAP_WRITE, FALSE, mapName.c_str());
|
||||
if (!hMap_) return false;
|
||||
}
|
||||
|
||||
shared_ = (uint8_t*)MapViewOfFile(hMap_, FILE_MAP_WRITE, 0, 0, shared_size_);
|
||||
if (!shared_) return false;
|
||||
|
||||
hReadyEvent_ = CreateEventW(nullptr, FALSE, FALSE, readyName.c_str());
|
||||
if (!hReadyEvent_) {
|
||||
hReadyEvent_ = OpenEventW(EVENT_MODIFY_STATE, FALSE, readyName.c_str());
|
||||
if (!hReadyEvent_) return false;
|
||||
}
|
||||
|
||||
hConsumedEvent_ = CreateEventW(nullptr, FALSE, FALSE, consumedName.c_str());
|
||||
if (!hConsumedEvent_) {
|
||||
hConsumedEvent_ = OpenEventW(SYNCHRONIZE, FALSE, consumedName.c_str());
|
||||
// If failed, consumer might create it later.
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool IddProducer::SubmitFrame(const void* data, uint32_t width, uint32_t height, uint32_t stride) {
|
||||
if (!shared_ || !hReadyEvent_) return false;
|
||||
|
||||
// Ensure consumer created the event if we missed it
|
||||
if (!hConsumedEvent_) {
|
||||
hConsumedEvent_ = OpenEventW(SYNCHRONIZE, FALSE, L"Local\\DisplayFlowIddConsumed");
|
||||
}
|
||||
|
||||
// Optional: Wait for previous frame consumption to avoid tearing/overflow
|
||||
// if (hConsumedEvent_) WaitForSingleObject(hConsumedEvent_, 0);
|
||||
// For now, overwrite mode is faster for streaming.
|
||||
|
||||
IddSharedHeader* hdr = reinterpret_cast<IddSharedHeader*>(shared_);
|
||||
|
||||
uint32_t needed = sizeof(IddSharedHeader) + stride * height;
|
||||
if (needed > shared_size_) return false;
|
||||
|
||||
hdr->width = width;
|
||||
hdr->height = height;
|
||||
hdr->format = 0;
|
||||
hdr->stride = stride;
|
||||
hdr->timestamp = (uint64_t)std::chrono::duration_cast<std::chrono::milliseconds>(
|
||||
std::chrono::high_resolution_clock::now().time_since_epoch()).count();
|
||||
hdr->frameId = ++frameId_;
|
||||
hdr->dataSize = stride * height;
|
||||
|
||||
std::memcpy(shared_ + sizeof(IddSharedHeader), data, hdr->dataSize);
|
||||
|
||||
SetEvent(hReadyEvent_);
|
||||
return true;
|
||||
}
|
||||
64
demo/windows_sender/IddBridge.h
Normal file
64
demo/windows_sender/IddBridge.h
Normal file
@@ -0,0 +1,64 @@
|
||||
#pragma once
|
||||
|
||||
#include <d3d11.h>
|
||||
#include <wrl/client.h>
|
||||
#include <windows.h>
|
||||
#include <cstdint>
|
||||
|
||||
using Microsoft::WRL::ComPtr;
|
||||
|
||||
// Fixed header at offset 0 of the shared-memory frame buffer; the pixel
// payload follows immediately after this struct. Written by IddProducer /
// the IDD driver, read by IddBridge -- the layout is a cross-process ABI,
// so fields must not be reordered or resized.
struct IddSharedHeader {
    uint32_t width;     // frame width in pixels
    uint32_t height;    // frame height in pixels
    uint32_t format;    // 0 = BGRA32 (only value currently produced/accepted)
    uint32_t stride;    // bytes per row (may exceed width * 4 due to padding)
    uint64_t timestamp; // producer-side capture time, milliseconds
    uint32_t frameId;   // monotonically increasing frame counter
    uint32_t dataSize;  // payload size in bytes (stride * height)
};
|
||||
|
||||
// Consumer side of the IDD shared-memory frame channel: waits for frames
// published by the driver/producer process and uploads them into a D3D11
// texture so they can feed the same encoder path as screen capture.
class IddBridge {
public:
    IddBridge();
    ~IddBridge();

    // Creates the D3D11 device and attaches to the shared frame buffer.
    bool Initialize();
    // Blocks briefly for the next frame; on success 'texture' references an
    // internal BGRA texture that stays valid until the next CaptureFrame().
    bool CaptureFrame(ComPtr<ID3D11Texture2D>& texture);
    void ReleaseFrame();
    ID3D11Device* GetDevice() const { return device_.Get(); }
    ID3D11DeviceContext* GetContext() const { return context_.Get(); }

private:
    // (Re)creates gpuTexture_ when the incoming frame size changes.
    bool EnsureTexture(int width, int height);

    ComPtr<ID3D11Device> device_;
    ComPtr<ID3D11DeviceContext> context_;
    ComPtr<ID3D11Texture2D> gpuTexture_;  // upload target handed out by CaptureFrame
    bool frame_acquired_ = false;

    HANDLE hMap_ = nullptr;            // shared-memory file mapping
    HANDLE hReadyEvent_ = nullptr;     // producer -> consumer: frame available
    HANDLE hConsumedEvent_ = nullptr;  // consumer -> producer: frame consumed
    uint8_t* shared_ = nullptr;        // mapped view (header + pixels)
    size_t shared_size_ = 0;
    int tex_width_ = 0;
    int tex_height_ = 0;
};
|
||||
|
||||
// Producer side of the Local\ shared-memory frame channel: writes BGRA
// frames into the mapping and signals the consumer (IddBridge) via the
// ready event. Used to simulate the IDD driver in-process.
class IddProducer {
public:
    IddProducer();
    ~IddProducer();

    // Creates (or opens) the Local\ mapping and both hand-off events.
    bool Initialize();
    // Submit a frame. Data should be BGRA32.
    bool SubmitFrame(const void* data, uint32_t width, uint32_t height, uint32_t stride);

private:
    HANDLE hMap_ = nullptr;            // shared-memory file mapping
    HANDLE hReadyEvent_ = nullptr;     // signalled after each SubmitFrame
    HANDLE hConsumedEvent_ = nullptr;  // optionally signalled back by the consumer
    uint8_t* shared_ = nullptr;        // mapped view (header + pixels)
    size_t shared_size_ = 0;
    uint32_t frameId_ = 0;             // monotonically increasing frame counter
};
|
||||
@@ -71,6 +71,48 @@ bool ScreenCapture::Initialize() {
|
||||
return true;
|
||||
}
|
||||
|
||||
bool ScreenCapture::InitializeWithOutputIndex(int index) {
|
||||
HRESULT hr = S_OK;
|
||||
D3D_FEATURE_LEVEL featureLevels[] = {
|
||||
D3D_FEATURE_LEVEL_11_1,
|
||||
D3D_FEATURE_LEVEL_11_0,
|
||||
};
|
||||
D3D_FEATURE_LEVEL featureLevel;
|
||||
hr = D3D11CreateDevice(
|
||||
nullptr,
|
||||
D3D_DRIVER_TYPE_HARDWARE,
|
||||
nullptr,
|
||||
D3D11_CREATE_DEVICE_BGRA_SUPPORT,
|
||||
featureLevels,
|
||||
ARRAYSIZE(featureLevels),
|
||||
D3D11_SDK_VERSION,
|
||||
&device_,
|
||||
&featureLevel,
|
||||
&context_
|
||||
);
|
||||
if (FAILED(hr)) {
|
||||
return false;
|
||||
}
|
||||
ComPtr<IDXGIDevice> dxgiDevice;
|
||||
hr = device_.As(&dxgiDevice);
|
||||
if (FAILED(hr)) return false;
|
||||
ComPtr<IDXGIAdapter> dxgiAdapter;
|
||||
hr = dxgiDevice->GetAdapter(&dxgiAdapter);
|
||||
if (FAILED(hr)) return false;
|
||||
ComPtr<IDXGIOutput> dxgiOutput;
|
||||
hr = dxgiAdapter->EnumOutputs(index, &dxgiOutput);
|
||||
if (FAILED(hr)) {
|
||||
return false;
|
||||
}
|
||||
ComPtr<IDXGIOutput1> dxgiOutput1;
|
||||
hr = dxgiOutput.As(&dxgiOutput1);
|
||||
if (FAILED(hr)) return false;
|
||||
hr = dxgiOutput1->DuplicateOutput(device_.Get(), &duplication_);
|
||||
if (FAILED(hr)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
bool ScreenCapture::CaptureFrame(ComPtr<ID3D11Texture2D>& texture) {
|
||||
if (frame_acquired_) {
|
||||
ReleaseFrame();
|
||||
|
||||
@@ -13,6 +13,7 @@ public:
|
||||
~ScreenCapture();
|
||||
|
||||
bool Initialize();
|
||||
bool InitializeWithOutputIndex(int index);
|
||||
bool CaptureFrame(ComPtr<ID3D11Texture2D>& texture);
|
||||
void ReleaseFrame();
|
||||
|
||||
|
||||
@@ -4,11 +4,7 @@
|
||||
VideoEncoder::VideoEncoder() = default;
|
||||
|
||||
// Destructor: all encoder resources (FFmpeg contexts, frame, packet,
// staging texture) are released through the shared Release() path so that
// teardown and Reinitialize() follow exactly the same code. Freeing the
// members here AND calling Release() would double-free swsContext_,
// because sws_freeContext() does not null the pointer.
VideoEncoder::~VideoEncoder() {
    Release();
}
|
||||
|
||||
bool VideoEncoder::Initialize(ID3D11Device* device, int width, int height, int fps, int bitrate) {
|
||||
@@ -34,7 +30,8 @@ bool VideoEncoder::Initialize(ID3D11Device* device, int width, int height, int f
|
||||
return false;
|
||||
}
|
||||
|
||||
// 2. Initialize FFmpeg
|
||||
// 2. Initialize Encoder
|
||||
#ifndef NO_FFMPEG
|
||||
const AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_H264);
|
||||
if (!codec) {
|
||||
std::cerr << "Codec H.264 not found" << std::endl;
|
||||
@@ -84,10 +81,27 @@ bool VideoEncoder::Initialize(ID3D11Device* device, int width, int height, int f
|
||||
std::cerr << "Could not allocate packet" << std::endl;
|
||||
return false;
|
||||
}
|
||||
#else
|
||||
// Stub path without FFmpeg
|
||||
#endif
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void VideoEncoder::Release() {
|
||||
if (swsContext_) { sws_freeContext(swsContext_); swsContext_ = nullptr; }
|
||||
if (codecContext_) { avcodec_free_context(&codecContext_); codecContext_ = nullptr; }
|
||||
if (frame_) { av_frame_free(&frame_); frame_ = nullptr; }
|
||||
if (packet_) { av_packet_free(&packet_); packet_ = nullptr; }
|
||||
if (stagingTexture_) { stagingTexture_.Reset(); }
|
||||
pts_ = 0;
|
||||
}
|
||||
|
||||
// Tears down and rebuilds the encoder for a new resolution/frame-rate
// (e.g. after a display-mode change), reusing the D3D11 device captured
// by the first Initialize() call.
// NOTE(review): assumes a successful Initialize() happened earlier so
// device_ is non-null -- confirm callers guarantee this.
bool VideoEncoder::Reinitialize(int width, int height, int fps, int bitrate) {
    Release();
    return Initialize(device_, width, height, fps, bitrate);
}
|
||||
|
||||
bool VideoEncoder::EncodeFrame(ID3D11Texture2D* texture, std::vector<uint8_t>& outputData, bool& isKeyFrame) {
|
||||
if (!texture || !stagingTexture_ || !context_) return false;
|
||||
|
||||
@@ -99,6 +113,7 @@ bool VideoEncoder::EncodeFrame(ID3D11Texture2D* texture, std::vector<uint8_t>& o
|
||||
HRESULT hr = context_->Map(stagingTexture_.Get(), 0, D3D11_MAP_READ, 0, &mapped);
|
||||
if (FAILED(hr)) return false;
|
||||
|
||||
#ifndef NO_FFMPEG
|
||||
// 3. Convert BGRA to YUV420P
|
||||
if (!swsContext_) {
|
||||
swsContext_ = sws_getContext(
|
||||
@@ -111,13 +126,12 @@ bool VideoEncoder::EncodeFrame(ID3D11Texture2D* texture, std::vector<uint8_t>& o
|
||||
uint8_t* srcSlice[] = { (uint8_t*)mapped.pData };
|
||||
int srcStride[] = { (int)mapped.RowPitch };
|
||||
|
||||
// We need to handle potential padding in mapped.RowPitch vs width*4
|
||||
// FFmpeg handles strides correctly.
|
||||
|
||||
sws_scale(swsContext_, srcSlice, srcStride, 0, height_, frame_->data, frame_->linesize);
|
||||
#endif
|
||||
|
||||
context_->Unmap(stagingTexture_.Get(), 0);
|
||||
|
||||
#ifndef NO_FFMPEG
|
||||
// 4. Encode
|
||||
frame_->pts = pts_++;
|
||||
|
||||
@@ -141,6 +155,10 @@ bool VideoEncoder::EncodeFrame(ID3D11Texture2D* texture, std::vector<uint8_t>& o
|
||||
|
||||
av_packet_unref(packet_);
|
||||
}
|
||||
#else
|
||||
// Stub: no encoding, but pipeline succeeds
|
||||
isKeyFrame = false;
|
||||
#endif
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -5,12 +5,14 @@
|
||||
#include <vector>
|
||||
#include <cstdint>
|
||||
|
||||
#ifndef NO_FFMPEG
|
||||
extern "C" {
|
||||
#include <libavcodec/avcodec.h>
|
||||
#include <libavutil/imgutils.h>
|
||||
#include <libavutil/opt.h>
|
||||
#include <libswscale/swscale.h>
|
||||
}
|
||||
#endif
|
||||
|
||||
using Microsoft::WRL::ComPtr;
|
||||
|
||||
@@ -20,17 +22,26 @@ public:
|
||||
~VideoEncoder();
|
||||
|
||||
bool Initialize(ID3D11Device* device, int width, int height, int fps, int bitrate);
|
||||
bool Reinitialize(int width, int height, int fps, int bitrate);
|
||||
bool EncodeFrame(ID3D11Texture2D* texture, std::vector<uint8_t>& outputData, bool& isKeyFrame);
|
||||
void Release();
|
||||
|
||||
private:
|
||||
ID3D11Device* device_ = nullptr;
|
||||
ID3D11DeviceContext* context_ = nullptr;
|
||||
ComPtr<ID3D11Texture2D> stagingTexture_;
|
||||
|
||||
#ifndef NO_FFMPEG
|
||||
AVCodecContext* codecContext_ = nullptr;
|
||||
AVFrame* frame_ = nullptr;
|
||||
AVPacket* packet_ = nullptr;
|
||||
SwsContext* swsContext_ = nullptr;
|
||||
#else
|
||||
void* codecContext_ = nullptr;
|
||||
void* frame_ = nullptr;
|
||||
void* packet_ = nullptr;
|
||||
void* swsContext_ = nullptr;
|
||||
#endif
|
||||
|
||||
int width_ = 0;
|
||||
int height_ = 0;
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
#include "NetworkSender.h"
|
||||
#include "ScreenCapture.h"
|
||||
#include "IddBridge.h"
|
||||
#include "VideoEncoder.h"
|
||||
#include <iostream>
|
||||
#include <sstream>
|
||||
@@ -7,15 +8,36 @@
|
||||
#include <thread>
|
||||
#include <chrono>
|
||||
#include <fstream>
|
||||
#include <windows.h>
|
||||
|
||||
int main(int argc, char* argv[]) {
|
||||
std::string ipStr = "127.0.0.1";
|
||||
int port = 8888;
|
||||
std::string outputFileName = "";
|
||||
std::string source = "screen";
|
||||
int outputIndex = -1;
|
||||
bool iddProducer = false;
|
||||
int producerOutputIndex = -1;
|
||||
|
||||
if (argc > 1) ipStr = argv[1];
|
||||
if (argc > 2) port = std::stoi(argv[2]);
|
||||
if (argc > 3) outputFileName = argv[3];
|
||||
for (int i = 4; i < argc; ++i) {
|
||||
std::string arg = argv[i];
|
||||
if (arg.rfind("--source=", 0) == 0) {
|
||||
source = arg.substr(std::string("--source=").size());
|
||||
} else if (arg.rfind("--output=", 0) == 0) {
|
||||
try {
|
||||
outputIndex = std::stoi(arg.substr(std::string("--output=").size()));
|
||||
} catch (...) {}
|
||||
} else if (arg == "--idd-producer") {
|
||||
iddProducer = true;
|
||||
} else if (arg.rfind("--producer-output=", 0) == 0) {
|
||||
try {
|
||||
producerOutputIndex = std::stoi(arg.substr(std::string("--producer-output=").size()));
|
||||
} catch (...) {}
|
||||
}
|
||||
}
|
||||
|
||||
// Parse IPs
|
||||
std::vector<std::string> ips;
|
||||
@@ -33,9 +55,19 @@ int main(int argc, char* argv[]) {
|
||||
std::cout << "Targets: ";
|
||||
for (const auto& ip : ips) std::cout << ip << " ";
|
||||
std::cout << ", Port: " << port << std::endl;
|
||||
std::cout << "Source: " << source << std::endl;
|
||||
if (source == "screen" && outputIndex >= 0) {
|
||||
std::cout << "Output Index: " << outputIndex << std::endl;
|
||||
}
|
||||
if (!outputFileName.empty()) {
|
||||
std::cout << "Output File: " << outputFileName << std::endl;
|
||||
}
|
||||
if (source == "idd" && iddProducer) {
|
||||
std::cout << "IDD Producer enabled" << std::endl;
|
||||
if (producerOutputIndex >= 0) {
|
||||
std::cout << "Producer Output Index: " << producerOutputIndex << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
// Debug: Open file to save H.264 stream if filename is provided
|
||||
std::ofstream outFile;
|
||||
@@ -48,29 +80,96 @@ int main(int argc, char* argv[]) {
|
||||
}
|
||||
}
|
||||
|
||||
std::thread producerThread;
|
||||
if (source == "idd" && iddProducer) {
|
||||
producerThread = std::thread([producerOutputIndex]() {
|
||||
ScreenCapture cap;
|
||||
bool okCap = false;
|
||||
if (producerOutputIndex >= 0) okCap = cap.InitializeWithOutputIndex(producerOutputIndex);
|
||||
else okCap = cap.Initialize();
|
||||
if (!okCap) return;
|
||||
D3D11_TEXTURE2D_DESC desc = {};
|
||||
ComPtr<ID3D11Texture2D> frame;
|
||||
while (!cap.CaptureFrame(frame)) {
|
||||
std::this_thread::sleep_for(std::chrono::milliseconds(10));
|
||||
}
|
||||
frame->GetDesc(&desc);
|
||||
cap.ReleaseFrame();
|
||||
D3D11_TEXTURE2D_DESC sdesc = {};
|
||||
sdesc.Width = desc.Width;
|
||||
sdesc.Height = desc.Height;
|
||||
sdesc.MipLevels = 1;
|
||||
sdesc.ArraySize = 1;
|
||||
sdesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
|
||||
sdesc.SampleDesc.Count = 1;
|
||||
sdesc.Usage = D3D11_USAGE_STAGING;
|
||||
sdesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
|
||||
ComPtr<ID3D11Texture2D> staging;
|
||||
cap.GetDevice()->CreateTexture2D(&sdesc, nullptr, &staging);
|
||||
|
||||
IddProducer producer;
|
||||
if (!producer.Initialize()) return;
|
||||
|
||||
while (true) {
|
||||
ComPtr<ID3D11Texture2D> tex;
|
||||
if (cap.CaptureFrame(tex)) {
|
||||
cap.GetContext()->CopyResource(staging.Get(), tex.Get());
|
||||
D3D11_MAPPED_SUBRESOURCE mapped = {};
|
||||
if (SUCCEEDED(cap.GetContext()->Map(staging.Get(), 0, D3D11_MAP_READ, 0, &mapped))) {
|
||||
producer.SubmitFrame(mapped.pData, desc.Width, desc.Height, (uint32_t)mapped.RowPitch);
|
||||
cap.GetContext()->Unmap(staging.Get(), 0);
|
||||
}
|
||||
cap.ReleaseFrame();
|
||||
} else {
|
||||
std::this_thread::sleep_for(std::chrono::milliseconds(5));
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
bool ok = false;
|
||||
ScreenCapture capture;
|
||||
if (!capture.Initialize()) {
|
||||
std::cerr << "Failed to initialize Screen Capture" << std::endl;
|
||||
IddBridge idd;
|
||||
bool useIdd = (source == "idd");
|
||||
if (useIdd) {
|
||||
ok = idd.Initialize();
|
||||
} else {
|
||||
if (outputIndex >= 0) {
|
||||
ok = capture.InitializeWithOutputIndex(outputIndex);
|
||||
} else {
|
||||
ok = capture.Initialize();
|
||||
}
|
||||
}
|
||||
if (!ok) {
|
||||
std::cerr << "Failed to initialize capture source" << std::endl;
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Get screen size
|
||||
D3D11_TEXTURE2D_DESC desc;
|
||||
ComPtr<ID3D11Texture2D> frame;
|
||||
// Capture one frame to get size
|
||||
std::cout << "Waiting for first frame..." << std::endl;
|
||||
while (!capture.CaptureFrame(frame)) {
|
||||
std::this_thread::sleep_for(std::chrono::milliseconds(10));
|
||||
if (useIdd) {
|
||||
while (!idd.CaptureFrame(frame)) {
|
||||
std::this_thread::sleep_for(std::chrono::milliseconds(10));
|
||||
}
|
||||
} else {
|
||||
while (!capture.CaptureFrame(frame)) {
|
||||
std::this_thread::sleep_for(std::chrono::milliseconds(10));
|
||||
}
|
||||
}
|
||||
frame->GetDesc(&desc);
|
||||
capture.ReleaseFrame();
|
||||
if (useIdd) {
|
||||
idd.ReleaseFrame();
|
||||
} else {
|
||||
capture.ReleaseFrame();
|
||||
}
|
||||
|
||||
int width = desc.Width;
|
||||
int height = desc.Height;
|
||||
std::cout << "Screen Size: " << width << "x" << height << std::endl;
|
||||
|
||||
VideoEncoder encoder;
|
||||
if (!encoder.Initialize(capture.GetDevice(), width, height, 60, 4000000)) { // 4Mbps
|
||||
if (!encoder.Initialize(useIdd ? idd.GetDevice() : capture.GetDevice(), width, height, 60, 4000000)) {
|
||||
std::cerr << "Failed to initialize Video Encoder" << std::endl;
|
||||
return 1;
|
||||
}
|
||||
@@ -88,7 +187,24 @@ int main(int argc, char* argv[]) {
|
||||
|
||||
while (true) {
|
||||
ComPtr<ID3D11Texture2D> texture;
|
||||
if (capture.CaptureFrame(texture)) {
|
||||
bool got = false;
|
||||
if (useIdd) {
|
||||
got = idd.CaptureFrame(texture);
|
||||
} else {
|
||||
got = capture.CaptureFrame(texture);
|
||||
}
|
||||
if (got) {
|
||||
D3D11_TEXTURE2D_DESC tdesc = {};
|
||||
texture->GetDesc(&tdesc);
|
||||
if (tdesc.Width != (UINT)width || tdesc.Height != (UINT)height) {
|
||||
std::cout << "Resolution changed: " << width << "x" << height << " -> " << tdesc.Width << "x" << tdesc.Height << std::endl;
|
||||
width = (int)tdesc.Width;
|
||||
height = (int)tdesc.Height;
|
||||
if (!encoder.Reinitialize(width, height, 60, 4000000)) {
|
||||
std::cerr << "Failed to reinitialize encoder on resolution change" << std::endl;
|
||||
break;
|
||||
}
|
||||
}
|
||||
std::vector<uint8_t> encodedData;
|
||||
bool isKeyFrame = false;
|
||||
|
||||
@@ -106,7 +222,11 @@ int main(int argc, char* argv[]) {
|
||||
}
|
||||
}
|
||||
}
|
||||
capture.ReleaseFrame();
|
||||
if (useIdd) {
|
||||
idd.ReleaseFrame();
|
||||
} else {
|
||||
capture.ReleaseFrame();
|
||||
}
|
||||
|
||||
frameCount++;
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user