Use factory method to create screen capturer

dijunkun committed 2023-12-15 17:29:17 +08:00
parent 9276d5bfec · commit 2fc89923ae
17 changed files with 256 additions and 260 deletions

View File

@@ -0,0 +1,143 @@
#include "screen_capturer_x11.h"
#include <iostream>
#include "log.h"
#define NV12_BUFFER_SIZE (1280 * 720 * 3 / 2)
static unsigned char nv12_buffer_[NV12_BUFFER_SIZE];
ScreenCapturerX11::ScreenCapturerX11() {}
ScreenCapturerX11::~ScreenCapturerX11() {}
int ScreenCapturerX11::Init(const RECORD_DESKTOP_RECT &rect, const int fps,
cb_desktop_data cb) {
if (cb) {
_on_data = cb;
}
fps_ = fps;
av_log_set_level(AV_LOG_QUIET);
pFormatCtx_ = avformat_alloc_context();
avdevice_register_all();
  // grabbing frame rate: honor the fps requested by the caller
  av_dict_set(&options_, "framerate", std::to_string(fps_).c_str(), 0);
// Make the grabbed area follow the mouse
// av_dict_set(&options_, "follow_mouse", "centered", 0);
// Video frame size. The default is to capture the full screen
// av_dict_set(&options_, "video_size", "1280x720", 0);
std::string capture_method = "x11grab";
ifmt_ = (AVInputFormat *)av_find_input_format(capture_method.c_str());
  if (!ifmt_) {
    LOG_ERROR("Couldn't find input format [{}]", capture_method.c_str());
    return -1;
  }
  // Open X11 display :0.0 (the whole desktop of screen 0)
if (avformat_open_input(&pFormatCtx_, ":0.0", ifmt_, &options_) != 0) {
printf("Couldn't open input stream.\n");
return -1;
}
if (avformat_find_stream_info(pFormatCtx_, NULL) < 0) {
printf("Couldn't find stream information.\n");
return -1;
}
videoindex_ = -1;
for (i_ = 0; i_ < pFormatCtx_->nb_streams; i_++)
if (pFormatCtx_->streams[i_]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoindex_ = i_;
break;
}
if (videoindex_ == -1) {
printf("Didn't find a video stream.\n");
return -1;
}
pCodecParam_ = pFormatCtx_->streams[videoindex_]->codecpar;
pCodecCtx_ = avcodec_alloc_context3(NULL);
avcodec_parameters_to_context(pCodecCtx_, pCodecParam_);
pCodec_ = const_cast<AVCodec *>(avcodec_find_decoder(pCodecCtx_->codec_id));
if (pCodec_ == NULL) {
printf("Codec not found.\n");
return -1;
}
if (avcodec_open2(pCodecCtx_, pCodec_, NULL) < 0) {
printf("Could not open codec.\n");
return -1;
}
const int screen_w = pFormatCtx_->streams[videoindex_]->codecpar->width;
const int screen_h = pFormatCtx_->streams[videoindex_]->codecpar->height;
pFrame_ = av_frame_alloc();
pFrameNv12_ = av_frame_alloc();
pFrame_->width = screen_w;
pFrame_->height = screen_h;
pFrameNv12_->width = 1280;
pFrameNv12_->height = 720;
  packet_ = av_packet_alloc();
img_convert_ctx_ = sws_getContext(
pFrame_->width, pFrame_->height, pCodecCtx_->pix_fmt, pFrameNv12_->width,
pFrameNv12_->height, AV_PIX_FMT_NV12, SWS_BICUBIC, NULL, NULL, NULL);
return 0;
}
int ScreenCapturerX11::Destroy() {
  Stop();
  if (capture_thread_ && capture_thread_->joinable()) {
    capture_thread_->join();
  }
  return 0;
}
int ScreenCapturerX11::Start() {
  _running = true;
  capture_thread_.reset(new std::thread([this]() {
    while (_running) {
      if (av_read_frame(pFormatCtx_, packet_) >= 0) {
        if (packet_->stream_index == videoindex_) {
          avcodec_send_packet(pCodecCtx_, packet_);
          // avcodec_receive_frame() returns 0 when a full frame was decoded
          got_picture_ = avcodec_receive_frame(pCodecCtx_, pFrame_);
          if (got_picture_ == 0) {
            av_image_fill_arrays(pFrameNv12_->data, pFrameNv12_->linesize,
                                 nv12_buffer_, AV_PIX_FMT_NV12,
                                 pFrameNv12_->width, pFrameNv12_->height, 1);
            sws_scale(img_convert_ctx_, pFrame_->data, pFrame_->linesize, 0,
                      pFrame_->height, pFrameNv12_->data,
                      pFrameNv12_->linesize);
            if (_on_data) {
              _on_data((unsigned char *)nv12_buffer_,
                       pFrameNv12_->width * pFrameNv12_->height * 3 / 2,
                       pFrameNv12_->width, pFrameNv12_->height);
            }
          }
        }
        // release the packet whether or not it belonged to the video stream
        av_packet_unref(packet_);
      }
    }
  }));
  return 0;
}
int ScreenCapturerX11::Pause() { return 0; }
int ScreenCapturerX11::Resume() { return 0; }
int ScreenCapturerX11::Stop() { _running = false; return 0; }
void ScreenCapturerX11::OnFrame() {}
void ScreenCapturerX11::CleanUp() {}
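
Note: NV12_BUFFER_SIZE above follows from the NV12 layout: a full-resolution Y plane (width * height bytes) followed by an interleaved UV plane at half resolution in both directions (width * height / 2 bytes). A minimal sketch of that arithmetic, using a hypothetical Nv12FrameSize helper:

#include <cstddef>

// Hypothetical helper: bytes needed for one NV12 frame of w x h pixels.
// Y plane: w * h bytes; interleaved UV plane: (w / 2) * (h / 2) * 2 bytes.
constexpr std::size_t Nv12FrameSize(std::size_t w, std::size_t h) {
  return w * h + (w / 2) * (h / 2) * 2;  // equals w * h * 3 / 2 for even w, h
}

static_assert(Nv12FrameSize(1280, 720) == 1280 * 720 * 3 / 2,
              "matches NV12_BUFFER_SIZE used by the capturers");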

View File

@@ -0,0 +1,80 @@
#ifndef _SCREEN_CAPTURER_X11_H_
#define _SCREEN_CAPTURER_X11_H_
#include <atomic>
#include <functional>
#include <string>
#include <thread>
#include "screen_capturer.h"
#ifdef __cplusplus
extern "C" {
#endif
#include <libavcodec/avcodec.h>
#include <libavdevice/avdevice.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
#ifdef __cplusplus
};
#endif
class ScreenCapturerX11 : public ScreenCapturer {
public:
ScreenCapturerX11();
~ScreenCapturerX11();
public:
virtual int Init(const RECORD_DESKTOP_RECT &rect, const int fps,
cb_desktop_data cb);
virtual int Destroy();
virtual int Start();
int Pause();
int Resume();
int Stop();
void OnFrame();
protected:
void CleanUp();
private:
  std::atomic_bool _running{false};
  std::atomic_bool _paused{false};
  std::atomic_bool _inited{false};
std::thread _thread;
std::string _device_name;
RECORD_DESKTOP_RECT _rect;
int _fps;
cb_desktop_data _on_data;
private:
int i_ = 0;
int videoindex_ = 0;
int got_picture_ = 0;
int fps_ = 0;
// ffmpeg
AVFormatContext *pFormatCtx_ = nullptr;
AVCodecContext *pCodecCtx_ = nullptr;
AVCodec *pCodec_ = nullptr;
AVCodecParameters *pCodecParam_ = nullptr;
AVDictionary *options_ = nullptr;
AVInputFormat *ifmt_ = nullptr;
AVFrame *pFrame_ = nullptr;
AVFrame *pFrameNv12_ = nullptr;
AVPacket *packet_ = nullptr;
struct SwsContext *img_convert_ctx_ = nullptr;
// thread
std::unique_ptr<std::thread> capture_thread_ = nullptr;
};
#endif

View File

@@ -0,0 +1,37 @@
#ifndef _X11_SESSION_H_
#define _X11_SESSION_H_
class X11Session {
public:
struct x11_session_frame {
unsigned int width;
unsigned int height;
unsigned int row_pitch;
const unsigned char *data;
};
class x11_session_observer {
public:
virtual ~x11_session_observer() {}
virtual void OnFrame(const x11_session_frame &frame) = 0;
};
public:
virtual void Release() = 0;
virtual int Initialize() = 0;
virtual void RegisterObserver(x11_session_observer *observer) = 0;
virtual int Start() = 0;
virtual int Stop() = 0;
virtual int Pause() = 0;
virtual int Resume() = 0;
protected:
virtual ~X11Session(){};
};
#endif

View File

@@ -0,0 +1,49 @@
#include "x11_session_impl.h"
#include <atomic>
#include <functional>
#include <iostream>
#include <memory>
#define CHECK_INIT \
if (!is_initialized_) { \
std::cout << "AE_NEED_INIT" << std::endl; \
return 4; \
}
X11SessionImpl::X11SessionImpl() {}
X11SessionImpl::~X11SessionImpl() {
Stop();
CleanUp();
}
void X11SessionImpl::Release() { delete this; }
int X11SessionImpl::Initialize() { return 0; }
void X11SessionImpl::RegisterObserver(x11_session_observer *observer) {
observer_ = observer;
}
int X11SessionImpl::Start() {
if (is_running_) return 0;
int error = 1;
CHECK_INIT;
return error;
}
int X11SessionImpl::Stop() { return 0; }
int X11SessionImpl::Pause() { return 0; }
int X11SessionImpl::Resume() { return 0; }
void X11SessionImpl::OnFrame() {}
void X11SessionImpl::OnClosed() {}
void X11SessionImpl::CleanUp() {}

View File

@@ -0,0 +1,44 @@
#ifndef _X11_SESSION_IMPL_H_
#define _X11_SESSION_IMPL_H_
#include <mutex>
#include <thread>
#include "x11_session.h"
class X11SessionImpl : public X11Session {
public:
X11SessionImpl();
~X11SessionImpl() override;
public:
void Release() override;
int Initialize() override;
void RegisterObserver(x11_session_observer *observer) override;
int Start() override;
int Stop() override;
int Pause() override;
int Resume() override;
private:
void OnFrame();
void OnClosed();
void CleanUp();
// void message_func();
private:
std::mutex lock_;
bool is_initialized_ = false;
bool is_running_ = false;
bool is_paused_ = false;
x11_session_observer *observer_ = nullptr;
};
#endif

View File

@@ -0,0 +1,144 @@
#include "screen_capturer_avf.h"
#include <iostream>
#include "log.h"
#define NV12_BUFFER_SIZE (1280 * 720 * 3 / 2)
static unsigned char nv12_buffer_[NV12_BUFFER_SIZE];
ScreenCapturerAvf::ScreenCapturerAvf() {}
ScreenCapturerAvf::~ScreenCapturerAvf() {}
int ScreenCapturerAvf::Init(const RECORD_DESKTOP_RECT &rect, const int fps,
cb_desktop_data cb) {
if (cb) {
_on_data = cb;
}
av_log_set_level(AV_LOG_QUIET);
pFormatCtx_ = avformat_alloc_context();
avdevice_register_all();
// grabbing frame rate
av_dict_set(&options_, "framerate", "60", 0);
av_dict_set(&options_, "pixel_format", "nv12", 0);
// show remote cursor
av_dict_set(&options_, "capture_cursor", "1", 0);
// Make the grabbed area follow the mouse
// av_dict_set(&options_, "follow_mouse", "centered", 0);
// Video frame size. The default is to capture the full screen
// av_dict_set(&options_, "video_size", "1280x720", 0);
ifmt_ = (AVInputFormat *)av_find_input_format("avfoundation");
  if (!ifmt_) {
    printf("Couldn't find input format avfoundation.\n");
    return -1;
  }
  // Open the first screen capture device exposed by avfoundation
if (avformat_open_input(&pFormatCtx_, "Capture screen 0", ifmt_, &options_) !=
0) {
printf("Couldn't open input stream.\n");
return -1;
}
if (avformat_find_stream_info(pFormatCtx_, NULL) < 0) {
printf("Couldn't find stream information.\n");
return -1;
}
videoindex_ = -1;
for (i_ = 0; i_ < pFormatCtx_->nb_streams; i_++)
if (pFormatCtx_->streams[i_]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoindex_ = i_;
break;
}
if (videoindex_ == -1) {
printf("Didn't find a video stream.\n");
return -1;
}
pCodecParam_ = pFormatCtx_->streams[videoindex_]->codecpar;
pCodecCtx_ = avcodec_alloc_context3(NULL);
avcodec_parameters_to_context(pCodecCtx_, pCodecParam_);
pCodec_ = const_cast<AVCodec *>(avcodec_find_decoder(pCodecCtx_->codec_id));
if (pCodec_ == NULL) {
printf("Codec not found.\n");
return -1;
}
if (avcodec_open2(pCodecCtx_, pCodec_, NULL) < 0) {
printf("Could not open codec.\n");
return -1;
}
const int screen_w = pFormatCtx_->streams[videoindex_]->codecpar->width;
const int screen_h = pFormatCtx_->streams[videoindex_]->codecpar->height;
pFrame_ = av_frame_alloc();
pFrameNv12_ = av_frame_alloc();
pFrame_->width = screen_w;
pFrame_->height = screen_h;
pFrameNv12_->width = 1280;
pFrameNv12_->height = 720;
  packet_ = av_packet_alloc();
img_convert_ctx_ = sws_getContext(
pFrame_->width, pFrame_->height, pCodecCtx_->pix_fmt, pFrameNv12_->width,
pFrameNv12_->height, AV_PIX_FMT_NV12, SWS_BICUBIC, NULL, NULL, NULL);
return 0;
}
int ScreenCapturerAvf::Destroy() {
  Stop();
  if (capture_thread_ && capture_thread_->joinable()) {
    capture_thread_->join();
  }
  return 0;
}
int ScreenCapturerAvf::Start() {
  _running = true;
  capture_thread_.reset(new std::thread([this]() {
    while (_running) {
      if (av_read_frame(pFormatCtx_, packet_) >= 0) {
        if (packet_->stream_index == videoindex_) {
          avcodec_send_packet(pCodecCtx_, packet_);
          // avcodec_receive_frame() returns 0 when a full frame was decoded
          got_picture_ = avcodec_receive_frame(pCodecCtx_, pFrame_);
          if (got_picture_ == 0) {
            av_image_fill_arrays(pFrameNv12_->data, pFrameNv12_->linesize,
                                 nv12_buffer_, AV_PIX_FMT_NV12,
                                 pFrameNv12_->width, pFrameNv12_->height, 1);
            sws_scale(img_convert_ctx_, pFrame_->data, pFrame_->linesize, 0,
                      pFrame_->height, pFrameNv12_->data,
                      pFrameNv12_->linesize);
            if (_on_data) {
              _on_data((unsigned char *)nv12_buffer_,
                       pFrameNv12_->width * pFrameNv12_->height * 3 / 2,
                       pFrameNv12_->width, pFrameNv12_->height);
            }
          }
        }
        // release the packet whether or not it belonged to the video stream
        av_packet_unref(packet_);
      }
    }
  }));
  return 0;
}
int ScreenCapturerAvf::Pause() { return 0; }
int ScreenCapturerAvf::Resume() { return 0; }
int ScreenCapturerAvf::Stop() { _running = false; return 0; }
void ScreenCapturerAvf::OnFrame() {}
void ScreenCapturerAvf::CleanUp() {}

View File

@@ -0,0 +1,85 @@
/*
* @Author: DI JUNKUN
* @Date: 2023-12-01
* Copyright (c) 2023 by DI JUNKUN, All Rights Reserved.
*/
#ifndef _SCREEN_CAPTURER_AVF_H_
#define _SCREEN_CAPTURER_AVF_H_
#include <atomic>
#include <functional>
#include <string>
#include <thread>
#include "screen_capturer.h"
#ifdef __cplusplus
extern "C" {
#endif
#include <libavcodec/avcodec.h>
#include <libavdevice/avdevice.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
#ifdef __cplusplus
};
#endif
class ScreenCapturerAvf : public ScreenCapturer {
public:
ScreenCapturerAvf();
~ScreenCapturerAvf();
public:
virtual int Init(const RECORD_DESKTOP_RECT &rect, const int fps,
cb_desktop_data cb);
virtual int Destroy();
virtual int Start();
int Pause();
int Resume();
int Stop();
void OnFrame();
protected:
void CleanUp();
private:
  std::atomic_bool _running{false};
  std::atomic_bool _paused{false};
  std::atomic_bool _inited{false};
std::thread _thread;
std::string _device_name;
RECORD_DESKTOP_RECT _rect;
int _fps;
cb_desktop_data _on_data;
private:
int i_ = 0;
int videoindex_ = 0;
int got_picture_ = 0;
// ffmpeg
AVFormatContext *pFormatCtx_ = nullptr;
AVCodecContext *pCodecCtx_ = nullptr;
AVCodec *pCodec_ = nullptr;
AVCodecParameters *pCodecParam_ = nullptr;
AVDictionary *options_ = nullptr;
AVInputFormat *ifmt_ = nullptr;
AVFrame *pFrame_ = nullptr;
AVFrame *pFrameNv12_ = nullptr;
AVPacket *packet_ = nullptr;
struct SwsContext *img_convert_ctx_ = nullptr;
// thread
std::unique_ptr<std::thread> capture_thread_ = nullptr;
};
#endif

View File

@@ -0,0 +1,33 @@
/*
* @Author: DI JUNKUN
* @Date: 2023-12-15
* Copyright (c) 2023 by DI JUNKUN, All Rights Reserved.
*/
#ifndef _SCREEN_CAPTURER_H_
#define _SCREEN_CAPTURER_H_
#include <functional>
class ScreenCapturer {
public:
typedef struct {
int left;
int top;
int right;
int bottom;
} RECORD_DESKTOP_RECT;
typedef std::function<void(unsigned char *, int, int, int)> cb_desktop_data;
public:
virtual ~ScreenCapturer() {}
public:
virtual int Init(const RECORD_DESKTOP_RECT &rect, const int fps,
cb_desktop_data cb) = 0;
virtual int Destroy() = 0;
virtual int Start() = 0;
};
#endif
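
Note: cb_desktop_data receives each converted frame as (data, size in bytes, width, height). A minimal sketch of a conforming callback, assuming the NV12 payload produced by the capturers in this commit (the file name and the ffplay command are only illustrative):

#include <cstdio>

// Hypothetical callback matching ScreenCapturer::cb_desktop_data.
void OnDesktopFrame(unsigned char *data, int size, int width, int height) {
  // Append the raw NV12 frame to a file for offline inspection, e.g. with
  //   ffplay -f rawvideo -pixel_format nv12 -video_size 1280x720 frames.nv12
  static FILE *fp = std::fopen("frames.nv12", "wb");
  if (fp && data && size > 0) std::fwrite(data, 1, size, fp);
  (void)width;
  (void)height;
}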

View File

@@ -0,0 +1,37 @@
/*
* @Author: DI JUNKUN
* @Date: 2023-12-15
* Copyright (c) 2023 by DI JUNKUN, All Rights Reserved.
*/
#ifndef _SCREEN_CAPTURER_FACTORY_H_
#define _SCREEN_CAPTURER_FACTORY_H_
#ifdef _WIN32
#include "screen_capturer_wgc.h"
#elif __linux__
#include "screen_capturer_x11.h"
#elif __APPLE__
#include "screen_capturer_avf.h"
#endif
class ScreenCapturerFactory {
public:
virtual ~ScreenCapturerFactory() {}
public:
ScreenCapturer* Create() {
#ifdef _WIN32
return new ScreenCapturerWgc();
#elif __linux__
return new ScreenCapturerX11();
#elif __APPLE__
return new ScreenCapturerAvf();
#else
return nullptr;
#endif
}
};
#endif
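
Note: a minimal usage sketch of the factory, assuming the caller links the matching platform implementation; the capture rectangle and the 30 fps request are illustrative and error handling is reduced to early returns:

#include <chrono>
#include <memory>
#include <thread>
#include "screen_capturer_factory.h"

int main() {
  ScreenCapturerFactory factory;
  std::unique_ptr<ScreenCapturer> capturer(factory.Create());
  if (!capturer) return -1;

  ScreenCapturer::RECORD_DESKTOP_RECT rect{0, 0, 1280, 720};  // illustrative
  auto on_frame = [](unsigned char *data, int size, int width, int height) {
    // consume one NV12 frame here (encode, send, ...)
  };
  if (capturer->Init(rect, 30, on_frame) != 0) return -1;

  capturer->Start();
  std::this_thread::sleep_for(std::chrono::seconds(5));
  capturer->Destroy();  // Destroy() also stops the capture thread
  return 0;
}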

View File

@@ -0,0 +1,180 @@
#include "screen_capturer_wgc.h"
#include <Windows.h>
#include <d3d11_4.h>
#include <winrt/Windows.Foundation.Metadata.h>
#include <winrt/Windows.Graphics.Capture.h>
extern "C" {
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
};
#include <iostream>
int BGRAToNV12FFmpeg(unsigned char *src_buffer, int width, int height,
unsigned char *dst_buffer) {
AVFrame *Input_pFrame = av_frame_alloc();
AVFrame *Output_pFrame = av_frame_alloc();
struct SwsContext *img_convert_ctx =
sws_getContext(width, height, AV_PIX_FMT_BGRA, 1280, 720, AV_PIX_FMT_NV12,
SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
av_image_fill_arrays(Input_pFrame->data, Input_pFrame->linesize, src_buffer,
AV_PIX_FMT_BGRA, width, height, 1);
av_image_fill_arrays(Output_pFrame->data, Output_pFrame->linesize, dst_buffer,
AV_PIX_FMT_NV12, 1280, 720, 1);
sws_scale(img_convert_ctx, (uint8_t const **)Input_pFrame->data,
Input_pFrame->linesize, 0, height, Output_pFrame->data,
Output_pFrame->linesize);
  av_frame_free(&Input_pFrame);
  av_frame_free(&Output_pFrame);
  if (img_convert_ctx) sws_freeContext(img_convert_ctx);
return 0;
}
BOOL WINAPI EnumMonitorProc(HMONITOR hmonitor, HDC hdc, LPRECT lprc,
LPARAM data) {
MONITORINFOEX info_ex;
info_ex.cbSize = sizeof(MONITORINFOEX);
GetMonitorInfo(hmonitor, &info_ex);
if (info_ex.dwFlags == DISPLAY_DEVICE_MIRRORING_DRIVER) return true;
if (info_ex.dwFlags & MONITORINFOF_PRIMARY) {
*(HMONITOR *)data = hmonitor;
}
return true;
}
HMONITOR GetPrimaryMonitor() {
HMONITOR hmonitor = nullptr;
::EnumDisplayMonitors(NULL, NULL, EnumMonitorProc, (LPARAM)&hmonitor);
return hmonitor;
}
ScreenCapturerWgc::ScreenCapturerWgc() {}
ScreenCapturerWgc::~ScreenCapturerWgc() {}
bool ScreenCapturerWgc::IsWgcSupported() {
try {
    /* IGraphicsCaptureItemInterop has no API contract of its own;
       UniversalApiContract version 8 corresponds to Windows 10 1903
       (10.0.18362.0) */
return winrt::Windows::Foundation::Metadata::ApiInformation::
IsApiContractPresent(L"Windows.Foundation.UniversalApiContract", 8);
} catch (const winrt::hresult_error &) {
return false;
} catch (...) {
return false;
}
}
int ScreenCapturerWgc::Init(const RECORD_DESKTOP_RECT &rect, const int fps,
cb_desktop_data cb) {
int error = 0;
if (_inited == true) return error;
nv12_frame_ = new unsigned char[rect.right * rect.bottom * 4];
_fps = fps;
_on_data = cb;
do {
if (!IsWgcSupported()) {
std::cout << "AE_UNSUPPORT" << std::endl;
error = 2;
break;
}
session_ = new WgcSessionImpl();
if (!session_) {
error = -1;
std::cout << "AE_WGC_CREATE_CAPTURER_FAILED" << std::endl;
break;
}
session_->RegisterObserver(this);
    error = session_->Initialize(GetPrimaryMonitor());
    if (error == 0) _inited = true;
  } while (0);
  return error;
}
int ScreenCapturerWgc::Destroy() {
  Stop();
  CleanUp();
  if (nv12_frame_) {
    delete[] nv12_frame_;
    nv12_frame_ = nullptr;
  }
  return 0;
}
int ScreenCapturerWgc::Start() {
if (_running == true) {
std::cout << "record desktop duplication is already running" << std::endl;
return 0;
}
if (_inited == false) {
std::cout << "AE_NEED_INIT" << std::endl;
return 4;
}
_running = true;
session_->Start();
return 0;
}
int ScreenCapturerWgc::Pause() {
_paused = true;
if (session_) session_->Pause();
return 0;
}
int ScreenCapturerWgc::Resume() {
_paused = false;
if (session_) session_->Resume();
return 0;
}
int ScreenCapturerWgc::Stop() {
_running = false;
if (session_) session_->Stop();
return 0;
}
void ScreenCapturerWgc::OnFrame(const WgcSession::wgc_session_frame &frame) {
  if (!_on_data || !nv12_frame_) return;
  BGRAToNV12FFmpeg((unsigned char *)frame.data, frame.width, frame.height,
                   nv12_frame_);
  // BGRAToNV12FFmpeg scales the captured BGRA frame to 1280x720 NV12
  _on_data(nv12_frame_, 1280 * 720 * 3 / 2, 1280, 720);
}
void ScreenCapturerWgc::CleanUp() {
_inited = false;
if (session_) session_->Release();
session_ = nullptr;
}

View File

@@ -0,0 +1,57 @@
#ifndef _SCREEN_CAPTURER_WGC_H_
#define _SCREEN_CAPTURER_WGC_H_
#include <atomic>
#include <functional>
#include <string>
#include <thread>
#include "screen_capturer.h"
#include "wgc_session.h"
#include "wgc_session_impl.h"
class ScreenCapturerWgc : public ScreenCapturer,
public WgcSession::wgc_session_observer {
public:
ScreenCapturerWgc();
~ScreenCapturerWgc();
public:
bool IsWgcSupported();
virtual int Init(const RECORD_DESKTOP_RECT &rect, const int fps,
cb_desktop_data cb);
virtual int Destroy();
virtual int Start();
int Pause();
int Resume();
int Stop();
void OnFrame(const WgcSession::wgc_session_frame &frame);
protected:
void CleanUp();
private:
WgcSession *session_ = nullptr;
  std::atomic_bool _running{false};
  std::atomic_bool _paused{false};
  std::atomic_bool _inited{false};
std::thread _thread;
std::string _device_name;
RECORD_DESKTOP_RECT _rect;
int _fps;
cb_desktop_data _on_data;
unsigned char *nv12_frame_ = nullptr;
};
#endif

View File

@@ -0,0 +1,40 @@
#ifndef _WGC_SESSION_H_
#define _WGC_SESSION_H_
#include <Windows.h>
class WgcSession {
public:
struct wgc_session_frame {
unsigned int width;
unsigned int height;
unsigned int row_pitch;
const unsigned char *data;
};
class wgc_session_observer {
public:
virtual ~wgc_session_observer() {}
virtual void OnFrame(const wgc_session_frame &frame) = 0;
};
public:
virtual void Release() = 0;
virtual int Initialize(HWND hwnd) = 0;
virtual int Initialize(HMONITOR hmonitor) = 0;
virtual void RegisterObserver(wgc_session_observer *observer) = 0;
virtual int Start() = 0;
virtual int Stop() = 0;
virtual int Pause() = 0;
virtual int Resume() = 0;
protected:
virtual ~WgcSession(){};
};
#endif
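
Note: a minimal observer sketch against this interface; ScreenCapturerWgc is the real consumer, this stand-alone variant only logs the frame geometry and is illustrative:

#include <iostream>
#include "wgc_session.h"

// Illustrative observer: prints the geometry of each captured BGRA frame.
class FrameLogger : public WgcSession::wgc_session_observer {
 public:
  void OnFrame(const WgcSession::wgc_session_frame &frame) override {
    // frame.data points to BGRA pixels, frame.row_pitch bytes per row.
    std::cout << frame.width << "x" << frame.height
              << " pitch=" << frame.row_pitch << std::endl;
  }
};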

View File

@@ -0,0 +1,366 @@
#include "wgc_session_impl.h"
#include <Windows.Graphics.Capture.Interop.h>
#include <atomic>
#include <functional>
#include <iostream>
#include <memory>
#define CHECK_INIT \
if (!is_initialized_) { \
std::cout << "AE_NEED_INIT" << std::endl; \
return 4; \
}
#define CHECK_CLOSED \
if (cleaned_.load() == true) { \
throw winrt::hresult_error(RO_E_CLOSED); \
}
extern "C" {
HRESULT __stdcall CreateDirect3D11DeviceFromDXGIDevice(
::IDXGIDevice *dxgiDevice, ::IInspectable **graphicsDevice);
}
WgcSessionImpl::WgcSessionImpl() {}
WgcSessionImpl::~WgcSessionImpl() {
Stop();
CleanUp();
}
void WgcSessionImpl::Release() { delete this; }
int WgcSessionImpl::Initialize(HWND hwnd) {
std::lock_guard locker(lock_);
target_.hwnd = hwnd;
target_.is_window = true;
return Initialize();
}
int WgcSessionImpl::Initialize(HMONITOR hmonitor) {
std::lock_guard locker(lock_);
target_.hmonitor = hmonitor;
target_.is_window = false;
return Initialize();
}
void WgcSessionImpl::RegisterObserver(wgc_session_observer *observer) {
std::lock_guard locker(lock_);
observer_ = observer;
}
int WgcSessionImpl::Start() {
std::lock_guard locker(lock_);
if (is_running_) return 0;
int error = 1;
CHECK_INIT;
try {
if (!capture_session_) {
auto current_size = capture_item_.Size();
capture_framepool_ =
winrt::Windows::Graphics::Capture::Direct3D11CaptureFramePool::
CreateFreeThreaded(d3d11_direct_device_,
winrt::Windows::Graphics::DirectX::
DirectXPixelFormat::B8G8R8A8UIntNormalized,
2, current_size);
capture_session_ = capture_framepool_.CreateCaptureSession(capture_item_);
capture_frame_size_ = current_size;
capture_framepool_trigger_ = capture_framepool_.FrameArrived(
winrt::auto_revoke, {this, &WgcSessionImpl::OnFrame});
capture_close_trigger_ = capture_item_.Closed(
winrt::auto_revoke, {this, &WgcSessionImpl::OnClosed});
}
if (!capture_framepool_) throw std::exception();
is_running_ = true;
    // We do not need a dedicated thread with a message loop because the frame
    // pool is created with CreateFreeThreaded instead of Create; the
    // performance impact still needs to be measured.
// loop_ = std::thread(std::bind(&WgcSessionImpl::message_func, this));
capture_session_.StartCapture();
error = 0;
  } catch (const winrt::hresult_error &) {
std::cout << "AE_WGC_CREATE_CAPTURER_FAILED" << std::endl;
return 86;
} catch (...) {
return 86;
}
return error;
}
int WgcSessionImpl::Stop() {
std::lock_guard locker(lock_);
CHECK_INIT;
is_running_ = false;
// if (loop_.joinable()) loop_.join();
if (capture_framepool_trigger_) capture_framepool_trigger_.revoke();
if (capture_session_) {
capture_session_.Close();
capture_session_ = nullptr;
}
return 0;
}
int WgcSessionImpl::Pause() {
std::lock_guard locker(lock_);
CHECK_INIT;
return 0;
}
int WgcSessionImpl::Resume() {
std::lock_guard locker(lock_);
CHECK_INIT;
return 0;
}
auto WgcSessionImpl::CreateD3D11Device() {
winrt::com_ptr<ID3D11Device> d3d_device;
HRESULT hr;
WINRT_ASSERT(!d3d_device);
D3D_DRIVER_TYPE type = D3D_DRIVER_TYPE_HARDWARE;
UINT flags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;
hr = D3D11CreateDevice(nullptr, type, nullptr, flags, nullptr, 0,
D3D11_SDK_VERSION, d3d_device.put(), nullptr, nullptr);
if (DXGI_ERROR_UNSUPPORTED == hr) {
// change D3D_DRIVER_TYPE
D3D_DRIVER_TYPE type = D3D_DRIVER_TYPE_WARP;
hr = D3D11CreateDevice(nullptr, type, nullptr, flags, nullptr, 0,
D3D11_SDK_VERSION, d3d_device.put(), nullptr,
nullptr);
}
winrt::check_hresult(hr);
winrt::com_ptr<::IInspectable> d3d11_device;
winrt::check_hresult(CreateDirect3D11DeviceFromDXGIDevice(
d3d_device.as<IDXGIDevice>().get(), d3d11_device.put()));
return d3d11_device
.as<winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice>();
}
auto WgcSessionImpl::CreateCaptureItemForWindow(HWND hwnd) {
auto activation_factory = winrt::get_activation_factory<
winrt::Windows::Graphics::Capture::GraphicsCaptureItem>();
auto interop_factory = activation_factory.as<IGraphicsCaptureItemInterop>();
winrt::Windows::Graphics::Capture::GraphicsCaptureItem item = {nullptr};
interop_factory->CreateForWindow(
hwnd,
winrt::guid_of<ABI::Windows::Graphics::Capture::IGraphicsCaptureItem>(),
reinterpret_cast<void **>(winrt::put_abi(item)));
return item;
}
auto WgcSessionImpl::CreateCaptureItemForMonitor(HMONITOR hmonitor) {
auto activation_factory = winrt::get_activation_factory<
winrt::Windows::Graphics::Capture::GraphicsCaptureItem>();
auto interop_factory = activation_factory.as<IGraphicsCaptureItemInterop>();
winrt::Windows::Graphics::Capture::GraphicsCaptureItem item = {nullptr};
interop_factory->CreateForMonitor(
hmonitor,
winrt::guid_of<ABI::Windows::Graphics::Capture::IGraphicsCaptureItem>(),
reinterpret_cast<void **>(winrt::put_abi(item)));
return item;
}
HRESULT WgcSessionImpl::CreateMappedTexture(
winrt::com_ptr<ID3D11Texture2D> src_texture, unsigned int width,
unsigned int height) {
D3D11_TEXTURE2D_DESC src_desc;
src_texture->GetDesc(&src_desc);
D3D11_TEXTURE2D_DESC map_desc;
map_desc.Width = width == 0 ? src_desc.Width : width;
map_desc.Height = height == 0 ? src_desc.Height : height;
map_desc.MipLevels = src_desc.MipLevels;
map_desc.ArraySize = src_desc.ArraySize;
map_desc.Format = src_desc.Format;
map_desc.SampleDesc = src_desc.SampleDesc;
map_desc.Usage = D3D11_USAGE_STAGING;
map_desc.BindFlags = 0;
map_desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
map_desc.MiscFlags = 0;
auto d3dDevice =
GetDXGIInterfaceFromObject<ID3D11Device>(d3d11_direct_device_);
return d3dDevice->CreateTexture2D(&map_desc, nullptr,
d3d11_texture_mapped_.put());
}
void WgcSessionImpl::OnFrame(
winrt::Windows::Graphics::Capture::Direct3D11CaptureFramePool const &sender,
winrt::Windows::Foundation::IInspectable const &args) {
std::lock_guard locker(lock_);
auto is_new_size = false;
{
auto frame = sender.TryGetNextFrame();
auto frame_size = frame.ContentSize();
if (frame_size.Width != capture_frame_size_.Width ||
frame_size.Height != capture_frame_size_.Height) {
// The thing we have been capturing has changed size.
// We need to resize our swap chain first, then blit the pixels.
// After we do that, retire the frame and then recreate our frame pool.
is_new_size = true;
capture_frame_size_ = frame_size;
}
// copy to mapped texture
{
auto frame_captured =
GetDXGIInterfaceFromObject<ID3D11Texture2D>(frame.Surface());
if (!d3d11_texture_mapped_ || is_new_size)
CreateMappedTexture(frame_captured);
d3d11_device_context_->CopyResource(d3d11_texture_mapped_.get(),
frame_captured.get());
D3D11_MAPPED_SUBRESOURCE map_result;
      HRESULT hr = d3d11_device_context_->Map(
          d3d11_texture_mapped_.get(), 0, D3D11_MAP_READ,
          0 /* with a CreateFreeThreaded frame pool we block here instead of
               passing D3D11_MAP_FLAG_DO_NOT_WAIT */,
          &map_result);
if (FAILED(hr)) {
OutputDebugStringW(
(L"map resource failed: " + std::to_wstring(hr)).c_str());
}
// copy data from map_result.pData
if (map_result.pData && observer_) {
observer_->OnFrame(wgc_session_frame{
static_cast<unsigned int>(frame_size.Width),
static_cast<unsigned int>(frame_size.Height), map_result.RowPitch,
const_cast<const unsigned char *>(
(unsigned char *)map_result.pData)});
}
d3d11_device_context_->Unmap(d3d11_texture_mapped_.get(), 0);
}
}
if (is_new_size) {
capture_framepool_.Recreate(d3d11_direct_device_,
winrt::Windows::Graphics::DirectX::
DirectXPixelFormat::B8G8R8A8UIntNormalized,
2, capture_frame_size_);
}
}
void WgcSessionImpl::OnClosed(
winrt::Windows::Graphics::Capture::GraphicsCaptureItem const &,
winrt::Windows::Foundation::IInspectable const &) {
OutputDebugStringW(L"WgcSessionImpl::OnClosed");
}
int WgcSessionImpl::Initialize() {
if (is_initialized_) return 0;
if (!(d3d11_direct_device_ = CreateD3D11Device())) {
std::cout << "AE_D3D_CREATE_DEVICE_FAILED" << std::endl;
return 1;
}
try {
if (target_.is_window)
capture_item_ = CreateCaptureItemForWindow(target_.hwnd);
else
capture_item_ = CreateCaptureItemForMonitor(target_.hmonitor);
// Set up
auto d3d11_device =
GetDXGIInterfaceFromObject<ID3D11Device>(d3d11_direct_device_);
d3d11_device->GetImmediateContext(d3d11_device_context_.put());
  } catch (const winrt::hresult_error &) {
std::cout << "AE_WGC_CREATE_CAPTURER_FAILED" << std::endl;
return 86;
} catch (...) {
return 86;
}
is_initialized_ = true;
return 0;
}
void WgcSessionImpl::CleanUp() {
std::lock_guard locker(lock_);
auto expected = false;
if (cleaned_.compare_exchange_strong(expected, true)) {
capture_close_trigger_.revoke();
capture_framepool_trigger_.revoke();
if (capture_framepool_) capture_framepool_.Close();
if (capture_session_) capture_session_.Close();
capture_framepool_ = nullptr;
capture_session_ = nullptr;
capture_item_ = nullptr;
is_initialized_ = false;
}
}
LRESULT CALLBACK WindowProc(HWND window, UINT message, WPARAM w_param,
LPARAM l_param) {
return DefWindowProc(window, message, w_param, l_param);
}
// void WgcSessionImpl::message_func() {
// const std::wstring kClassName = L"am_fake_window";
// WNDCLASS wc = {};
// wc.style = CS_HREDRAW | CS_VREDRAW;
// wc.lpfnWndProc = DefWindowProc;
// wc.hCursor = LoadCursor(nullptr, IDC_ARROW);
// wc.hbrBackground = reinterpret_cast<HBRUSH>(COLOR_WINDOW);
// wc.lpszClassName = kClassName.c_str();
// if (!::RegisterClassW(&wc)) return;
// hwnd_ = ::CreateWindowW(kClassName.c_str(), nullptr, WS_OVERLAPPEDWINDOW,
// 0,
// 0, 0, 0, nullptr, nullptr, nullptr, nullptr);
// MSG msg;
// while (is_running_) {
// while (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
// if (!is_running_) break;
// TranslateMessage(&msg);
// DispatchMessage(&msg);
// }
// Sleep(10);
// }
// ::CloseWindow(hwnd_);
// ::DestroyWindow(hwnd_);
// }
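
Note: WgcSessionImpl is built on C++/WinRT, so the calling thread typically needs the Windows Runtime initialized before Initialize() is used. A hedged driver sketch (winrt::init_apartment is standard C++/WinRT; the observer, monitor handle, and timing are illustrative):

#include <winrt/base.h>
#include "wgc_session_impl.h"

// Illustrative driver: capture the given monitor for ~5 seconds.
void RunCaptureOnce(WgcSession::wgc_session_observer *observer,
                    HMONITOR monitor) {
  winrt::init_apartment(winrt::apartment_type::multi_threaded);
  WgcSession *session = new WgcSessionImpl();
  session->RegisterObserver(observer);
  if (session->Initialize(monitor) == 0 && session->Start() == 0) {
    Sleep(5000);
    session->Stop();
  }
  session->Release();  // Release() deletes the session
}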

View File

@@ -0,0 +1,116 @@
#ifndef _WGC_SESSION_IMPL_H_
#define _WGC_SESSION_IMPL_H_
#include <d3d11_4.h>
#include <winrt/Windows.Foundation.h>
#include <winrt/Windows.Graphics.Capture.h>
#include <mutex>
#include <thread>
#include "wgc_session.h"
class WgcSessionImpl : public WgcSession {
struct __declspec(uuid("A9B3D012-3DF2-4EE3-B8D1-8695F457D3C1"))
IDirect3DDxgiInterfaceAccess : ::IUnknown {
virtual HRESULT __stdcall GetInterface(GUID const &id, void **object) = 0;
};
template <typename T>
inline auto GetDXGIInterfaceFromObject(
winrt::Windows::Foundation::IInspectable const &object) {
auto access = object.as<IDirect3DDxgiInterfaceAccess>();
winrt::com_ptr<T> result;
winrt::check_hresult(
access->GetInterface(winrt::guid_of<T>(), result.put_void()));
return result;
}
struct {
union {
HWND hwnd;
HMONITOR hmonitor;
};
bool is_window;
} target_{0};
public:
WgcSessionImpl();
~WgcSessionImpl() override;
public:
void Release() override;
int Initialize(HWND hwnd) override;
int Initialize(HMONITOR hmonitor) override;
void RegisterObserver(wgc_session_observer *observer) override;
int Start() override;
int Stop() override;
int Pause() override;
int Resume() override;
private:
auto CreateD3D11Device();
auto CreateCaptureItemForWindow(HWND hwnd);
auto CreateCaptureItemForMonitor(HMONITOR hmonitor);
HRESULT CreateMappedTexture(winrt::com_ptr<ID3D11Texture2D> src_texture,
unsigned int width = 0, unsigned int height = 0);
void OnFrame(
winrt::Windows::Graphics::Capture::Direct3D11CaptureFramePool const
&sender,
winrt::Windows::Foundation::IInspectable const &args);
void OnClosed(winrt::Windows::Graphics::Capture::GraphicsCaptureItem const &,
winrt::Windows::Foundation::IInspectable const &);
int Initialize();
void CleanUp();
// void message_func();
private:
std::mutex lock_;
bool is_initialized_ = false;
bool is_running_ = false;
bool is_paused_ = false;
wgc_session_observer *observer_ = nullptr;
// wgc
winrt::Windows::Graphics::Capture::GraphicsCaptureItem capture_item_{nullptr};
winrt::Windows::Graphics::Capture::GraphicsCaptureSession capture_session_{
nullptr};
winrt::Windows::Graphics::SizeInt32 capture_frame_size_;
winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice
d3d11_direct_device_{nullptr};
winrt::com_ptr<ID3D11DeviceContext> d3d11_device_context_{nullptr};
winrt::com_ptr<ID3D11Texture2D> d3d11_texture_mapped_{nullptr};
std::atomic<bool> cleaned_ = false;
winrt::Windows::Graphics::Capture::Direct3D11CaptureFramePool
capture_framepool_{nullptr};
winrt::Windows::Graphics::Capture::Direct3D11CaptureFramePool::
FrameArrived_revoker capture_framepool_trigger_;
winrt::Windows::Graphics::Capture::GraphicsCaptureItem::Closed_revoker
capture_close_trigger_;
// message loop
std::thread loop_;
HWND hwnd_ = nullptr;
};
// template <typename T>
// inline auto WgcSessionImpl::GetDXGIInterfaceFromObject(
// winrt::Windows::Foundation::IInspectable const &object) {
// auto access = object.as<IDirect3DDxgiInterfaceAccess>();
// winrt::com_ptr<T> result;
// winrt::check_hresult(
// access->GetInterface(winrt::guid_of<T>(), result.put_void()));
// return result;
// }
#endif