mirror of
https://github.com/kunkundi/crossdesk.git
synced 2025-10-26 12:15:34 +08:00
[feat] remove dependency on FFmpeg for macOS
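The files below are the FFmpeg/avfoundation-based screen capturer and the related FFmpeg audio experiments that this commit removes. For orientation only, a minimal sketch of an FFmpeg-free capture path on macOS using CGDisplayStream (the same API the ScreenCapturerCg class further down in this diff relies on); the 30-second run-loop duration and the empty frame handler body are illustrative assumptions, and the handler is a block, so this needs to be built as Objective-C++ or with -fblocks.

// Sketch only: grab the main display with CGDisplayStream instead of FFmpeg.
#include <CoreGraphics/CoreGraphics.h>
#include <IOSurface/IOSurface.h>

int main() {
  CGDirectDisplayID display = CGMainDisplayID();
  size_t w = CGDisplayPixelsWide(display);
  size_t h = CGDisplayPixelsHigh(display);

  CGDisplayStreamRef stream = CGDisplayStreamCreate(
      display, w, h, 'BGRA', nullptr,
      ^(CGDisplayStreamFrameStatus status, uint64_t time, IOSurfaceRef surface,
        CGDisplayStreamUpdateRef update) {
        if (status != kCGDisplayStreamFrameStatusFrameComplete) return;
        // surface now holds one BGRA frame; hand it to the consumer here.
      });
  if (!stream) return -1;

  CGDisplayStreamStart(stream);
  CFRunLoopAddSource(CFRunLoopGetCurrent(),
                     CGDisplayStreamGetRunLoopSource(stream),
                     kCFRunLoopCommonModes);
  CFRunLoopRunInMode(kCFRunLoopDefaultMode, 30.0, false);  // capture for ~30 s
  CGDisplayStreamStop(stream);
  CFRelease(stream);
  return 0;
}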
@@ -1,212 +0,0 @@
#include "screen_capturer_avf.h"

#include <ApplicationServices/ApplicationServices.h>

#include <iostream>

#include "rd_log.h"

#define USE_SCALE_FACTOR 0

ScreenCapturerAvf::ScreenCapturerAvf() {}

ScreenCapturerAvf::~ScreenCapturerAvf() {
  if (inited_ && capture_thread_.joinable()) {
    capture_thread_.join();
    inited_ = false;
  }

  if (nv12_frame_) {
    delete[] nv12_frame_;
    nv12_frame_ = nullptr;
  }

  if (pFormatCtx_) {
    avformat_close_input(&pFormatCtx_);
    pFormatCtx_ = nullptr;
  }

  if (pCodecCtx_) {
    avcodec_free_context(&pCodecCtx_);
    pCodecCtx_ = nullptr;
  }

  if (options_) {
    av_dict_free(&options_);
    options_ = nullptr;
  }

  if (pFrame_) {
    av_frame_free(&pFrame_);
    pFrame_ = nullptr;
  }

  if (packet_) {
    av_packet_free(&packet_);
    packet_ = nullptr;
  }

#if USE_SCALE_FACTOR
  if (img_convert_ctx_) {
    sws_freeContext(img_convert_ctx_);
    img_convert_ctx_ = nullptr;
  }
#endif
}

int ScreenCapturerAvf::Init(const int fps, cb_desktop_data cb) {
  if (cb) {
    _on_data = cb;
  }

  av_log_set_level(AV_LOG_QUIET);

  pFormatCtx_ = avformat_alloc_context();

  avdevice_register_all();

  // grabbing frame rate
  av_dict_set(&options_, "framerate", "60", 0);
  av_dict_set(&options_, "pixel_format", "nv12", 0);
  // show remote cursor
  av_dict_set(&options_, "capture_cursor", "0", 0);
  // Make the grabbed area follow the mouse
  // av_dict_set(&options_, "follow_mouse", "centered", 0);
  // Video frame size. The default is to capture the full screen
  // av_dict_set(&options_, "video_size", "1440x900", 0);
  ifmt_ = (AVInputFormat *)av_find_input_format("avfoundation");
  if (!ifmt_) {
    printf("Couldn't find input format.\n");
  }

  // Grab at position 10,20
  if (avformat_open_input(&pFormatCtx_, "Capture screen 0", ifmt_, &options_) !=
      0) {
    printf("Couldn't open input stream.\n");
    return -1;
  }

  if (avformat_find_stream_info(pFormatCtx_, NULL) < 0) {
    printf("Couldn't find stream information.\n");
    return -1;
  }

  videoindex_ = -1;
  for (i_ = 0; i_ < pFormatCtx_->nb_streams; i_++)
    if (pFormatCtx_->streams[i_]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
      videoindex_ = i_;
      break;
    }
  if (videoindex_ == -1) {
    printf("Didn't find a video stream.\n");
    return -1;
  }

  pCodecParam_ = pFormatCtx_->streams[videoindex_]->codecpar;

  pCodecCtx_ = avcodec_alloc_context3(NULL);
  avcodec_parameters_to_context(pCodecCtx_, pCodecParam_);

  pCodec_ = const_cast<AVCodec *>(avcodec_find_decoder(pCodecCtx_->codec_id));
  if (pCodec_ == NULL) {
    printf("Codec not found.\n");
    return -1;
  }
  if (avcodec_open2(pCodecCtx_, pCodec_, NULL) < 0) {
    printf("Could not open codec.\n");
    return -1;
  }

  const int screen_w = pFormatCtx_->streams[videoindex_]->codecpar->width;
  const int screen_h = pFormatCtx_->streams[videoindex_]->codecpar->height;

  pFrame_ = av_frame_alloc();
  pFrame_->width = screen_w;
  pFrame_->height = screen_h;

#if USE_SCALE_FACTOR
  pFrame_resized_ = av_frame_alloc();
  pFrame_resized_->width = CGDisplayPixelsWide(CGMainDisplayID());
  pFrame_resized_->height = CGDisplayPixelsHigh(CGMainDisplayID());

  img_convert_ctx_ =
      sws_getContext(pFrame_->width, pFrame_->height, pCodecCtx_->pix_fmt,
                     pFrame_resized_->width, pFrame_resized_->height,
                     AV_PIX_FMT_NV12, SWS_BICUBIC, NULL, NULL, NULL);
#endif

  if (!nv12_frame_) {
    // NV12 holds a full-resolution Y plane plus a half-height interleaved UV
    // plane, i.e. width * height * 3 / 2 bytes in total.
    nv12_frame_ = new unsigned char[screen_w * screen_h * 3 / 2];
  }

  packet_ = (AVPacket *)av_malloc(sizeof(AVPacket));

  inited_ = true;

  return 0;
}

int ScreenCapturerAvf::Destroy() {
  running_ = false;
  return 0;
}

int ScreenCapturerAvf::Start() {
  if (running_) {
    return 0;
  }

  running_ = true;
  capture_thread_ = std::thread([this]() {
    while (running_) {
      if (av_read_frame(pFormatCtx_, packet_) >= 0) {
        if (packet_->stream_index == videoindex_) {
          avcodec_send_packet(pCodecCtx_, packet_);
          av_packet_unref(packet_);
          // avcodec_receive_frame() returns 0 on success, so a zero value here
          // means a decoded frame is available.
          got_picture_ = avcodec_receive_frame(pCodecCtx_, pFrame_);

          if (!got_picture_) {
#if USE_SCALE_FACTOR
            av_image_fill_arrays(pFrame_resized_->data,
                                 pFrame_resized_->linesize, nv12_frame_,
                                 AV_PIX_FMT_NV12, pFrame_resized_->width,
                                 pFrame_resized_->height, 1);

            sws_scale(img_convert_ctx_, pFrame_->data, pFrame_->linesize, 0,
                      pFrame_->height, pFrame_resized_->data,
                      pFrame_resized_->linesize);

            _on_data((unsigned char *)nv12_frame_,
                     pFrame_resized_->width * pFrame_resized_->height * 3 / 2,
                     pFrame_resized_->width, pFrame_resized_->height);
#else
            memcpy(nv12_frame_, pFrame_->data[0],
                   pFrame_->linesize[0] * pFrame_->height);
            memcpy(nv12_frame_ + pFrame_->linesize[0] * pFrame_->height,
                   pFrame_->data[1],
                   pFrame_->linesize[1] * pFrame_->height / 2);
            _on_data((unsigned char *)nv12_frame_,
                     pFrame_->width * pFrame_->height * 3 / 2, pFrame_->width,
                     pFrame_->height);
#endif
          }
        }
      }
    }
  });

  return 0;
}

int ScreenCapturerAvf::Stop() {
  running_ = false;
  return 0;
}

int ScreenCapturerAvf::Pause() { return 0; }

int ScreenCapturerAvf::Resume() { return 0; }

void ScreenCapturerAvf::OnFrame() {}

void ScreenCapturerAvf::CleanUp() {}
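The capture thread above delivers each frame to the _on_data callback as one packed NV12 buffer of width * height * 3 / 2 bytes. A minimal sketch of how a consumer could locate the two planes inside that buffer; handle_nv12 is a hypothetical receiver, only the buffer layout comes from the file above.

// Illustrative sketch: splitting the packed NV12 buffer into its planes.
void handle_nv12(unsigned char *data, int size, int width, int height) {
  unsigned char *y_plane = data;  // width * height bytes of luma
  unsigned char *uv_plane =
      data + (size_t)width * height;  // width * height / 2 bytes of interleaved UV
  // Each UV pair covers a 2x2 block of luma pixels, which is why the total
  // size works out to width * height * 3 / 2.
  (void)y_plane;
  (void)uv_plane;
  (void)size;
}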
@@ -1,87 +0,0 @@
/*
 * @Author: DI JUNKUN
 * @Date: 2023-12-01
 * Copyright (c) 2023 by DI JUNKUN, All Rights Reserved.
 */

#ifndef _SCREEN_CAPTURER_AVF_H_
#define _SCREEN_CAPTURER_AVF_H_

#include <atomic>
#include <functional>
#include <string>
#include <thread>

#include "screen_capturer.h"

#ifdef __cplusplus
extern "C" {
#endif
#include <libavcodec/avcodec.h>
#include <libavdevice/avdevice.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
#ifdef __cplusplus
};
#endif

class ScreenCapturerAvf : public ScreenCapturer {
 public:
  ScreenCapturerAvf();
  ~ScreenCapturerAvf();

 public:
  virtual int Init(const int fps, cb_desktop_data cb);

  virtual int Destroy();

  virtual int Start();

  virtual int Stop();

  int Pause();
  int Resume();

  void OnFrame();

 protected:
  void CleanUp();

 private:
  std::atomic_bool _paused;
  std::atomic_bool _inited;

  std::thread _thread;

  std::string _device_name;

  int _fps;

  cb_desktop_data _on_data;

 private:
  int i_ = 0;
  int videoindex_ = 0;
  int got_picture_ = 0;
  bool inited_ = false;

  // ffmpeg
  AVFormatContext *pFormatCtx_ = nullptr;
  AVCodecContext *pCodecCtx_ = nullptr;
  AVCodec *pCodec_ = nullptr;
  AVCodecParameters *pCodecParam_ = nullptr;
  AVDictionary *options_ = nullptr;
  AVInputFormat *ifmt_ = nullptr;
  AVFrame *pFrame_ = nullptr;
  AVFrame *pFrame_resized_ = nullptr;
  AVPacket *packet_ = nullptr;
  struct SwsContext *img_convert_ctx_ = nullptr;
  unsigned char *nv12_frame_ = nullptr;

  // thread
  std::thread capture_thread_;
  std::atomic_bool running_;
};

#endif
@@ -1,104 +0,0 @@
#include <IOSurface/IOSurface.h>

#include <utility>

#include "rd_log.h"
#include "screen_capturer_cgd.h"

ScreenCapturerCg::ScreenCapturerCg() {}

ScreenCapturerCg::~ScreenCapturerCg() {}

int ScreenCapturerCg::Init(const int fps, cb_desktop_data cb) {
  if (cb) {
    _on_data = cb;
  }

  size_t pixel_width = 1280;
  size_t pixel_height = 720;
  CGDirectDisplayID display_id = 0;

  CGDisplayStreamFrameAvailableHandler handler =
      ^(CGDisplayStreamFrameStatus status, uint64_t display_time,
        IOSurfaceRef frame_surface, CGDisplayStreamUpdateRef updateRef) {
        if (status == kCGDisplayStreamFrameStatusStopped) return;
        // Only pay attention to frame updates.
        if (status != kCGDisplayStreamFrameStatusFrameComplete) return;

        // size_t count = 0;
        // const CGRect* rects = CGDisplayStreamUpdateGetRects(
        //     updateRef, kCGDisplayStreamUpdateDirtyRects, &count);

        // Fetch the frame data.
        void* frameData = IOSurfaceGetBaseAddressOfPlane(frame_surface, 0);
        size_t width = IOSurfaceGetWidthOfPlane(frame_surface, 0);
        size_t height = IOSurfaceGetHeightOfPlane(frame_surface, 0);
      };

  CFDictionaryRef properties_dictionary = CFDictionaryCreate(
      kCFAllocatorDefault, (const void*[]){kCGDisplayStreamShowCursor},
      (const void*[]){kCFBooleanFalse}, 1, &kCFTypeDictionaryKeyCallBacks,
      &kCFTypeDictionaryValueCallBacks);

  CGDisplayStreamRef display_stream =
      CGDisplayStreamCreate(display_id, pixel_width, pixel_height, 'BGRA',
                            properties_dictionary, handler);

  if (display_stream) {
    CGError error = CGDisplayStreamStart(display_stream);
    if (error != kCGErrorSuccess) return -1;

    CFRunLoopSourceRef source = CGDisplayStreamGetRunLoopSource(display_stream);
    CFRunLoopAddSource(CFRunLoopGetCurrent(), source, kCFRunLoopCommonModes);
    display_streams_.push_back(display_stream);
  }

  CFRelease(properties_dictionary);

  return 0;
}

int ScreenCapturerCg::Destroy() {
  running_ = false;
  return 0;
}

int ScreenCapturerCg::Start() {
  if (running_) {
    return 0;
  }

  running_ = true;
  capture_thread_ = std::thread([this]() {
    while (running_) {
      CFRunLoopRun();
    }
  });

  return 0;
}

int ScreenCapturerCg::Stop() {
  running_ = false;
  return 0;
}

int ScreenCapturerCg::Pause() { return 0; }

int ScreenCapturerCg::Resume() { return 0; }

void ScreenCapturerCg::OnFrame() {}

void ScreenCapturerCg::CleanUp() {}

//

void ScreenCapturerCg::UnregisterRefreshAndMoveHandlers() {
  for (CGDisplayStreamRef stream : display_streams_) {
    CFRunLoopSourceRef source = CGDisplayStreamGetRunLoopSource(stream);
    CFRunLoopRemoveSource(CFRunLoopGetCurrent(), source, kCFRunLoopCommonModes);
    CGDisplayStreamStop(stream);
    CFRelease(stream);
  }
  display_streams_.clear();
}
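The frame handler in Init() above reads IOSurfaceGetBaseAddressOfPlane without locking the surface. A minimal sketch of the usual lock/copy/unlock pattern for reading pixels out of the IOSurface delivered by CGDisplayStream; CopyFrame and dst are illustrative assumptions, not part of the original file.

// Illustrative sketch: lock the surface before touching its base address.
#include <IOSurface/IOSurface.h>
#include <cstring>

static void CopyFrame(IOSurfaceRef surface, unsigned char *dst) {
  IOSurfaceLock(surface, kIOSurfaceLockReadOnly, nullptr);
  const unsigned char *src =
      (const unsigned char *)IOSurfaceGetBaseAddressOfPlane(surface, 0);
  size_t stride = IOSurfaceGetBytesPerRowOfPlane(surface, 0);
  size_t height = IOSurfaceGetHeightOfPlane(surface, 0);
  memcpy(dst, src, stride * height);  // copies row padding as well
  IOSurfaceUnlock(surface, kIOSurfaceLockReadOnly, nullptr);
}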
@@ -1,56 +0,0 @@
/*
 * @Author: DI JUNKUN
 * @Date: 2024-10-16
 * Copyright (c) 2024 by DI JUNKUN, All Rights Reserved.
 */

#ifndef _SCREEN_CAPTURER_CGD_H_
#define _SCREEN_CAPTURER_CGD_H_

#include <CoreGraphics/CoreGraphics.h>

#include <atomic>
#include <functional>
#include <memory>
#include <string>
#include <thread>
#include <vector>

#include "screen_capturer.h"

class ScreenCapturerCg : public ScreenCapturer {
 public:
  ScreenCapturerCg();
  ~ScreenCapturerCg();

 public:
  virtual int Init(const int fps, cb_desktop_data cb);

  virtual int Destroy();

  virtual int Start();

  virtual int Stop();

  int Pause();

  int Resume();

  void OnFrame();

 protected:
  void CleanUp();

 private:
  int _fps;
  cb_desktop_data _on_data;

  // thread
  std::thread capture_thread_;
  std::atomic_bool running_;

 private:
};

#endif
@@ -1,303 +0,0 @@
// MyAudioSink.cpp : entry point of the console application.
//

// #define _CRT_SECURE_NO_WARNINGS

#include <Audioclient.h>
#include <Devicetopology.h>
#include <Endpointvolume.h>
#include <Mmdeviceapi.h>
#include <tchar.h>

#include <iostream>
//-----------------------------------------------------------
// Record an audio stream from the default audio capture
// device. The RecordAudioStream function allocates a shared
// buffer big enough to hold one second of PCM audio data.
// The function uses this buffer to stream data from the
// capture device. The main loop runs every 1/2 second.
//-----------------------------------------------------------

// REFERENCE_TIME time units per second and per millisecond
#define REFTIMES_PER_SEC 10000000
#define REFTIMES_PER_MILLISEC 10000

#define EXIT_ON_ERROR(hres) \
  if (FAILED(hres)) {       \
    goto Exit;              \
  }

#define SAFE_RELEASE(punk) \
  if ((punk) != NULL) {    \
    (punk)->Release();     \
    (punk) = NULL;         \
  }

#define IS_INPUT_DEVICE 0  // toggle between the input and the output audio device

#define BUFFER_TIME_100NS (5 * 10000000)

const CLSID CLSID_MMDeviceEnumerator = __uuidof(MMDeviceEnumerator);
const IID IID_IMMDeviceEnumerator = __uuidof(IMMDeviceEnumerator);
const IID IID_IAudioClient = __uuidof(IAudioClient);
const IID IID_IAudioCaptureClient = __uuidof(IAudioCaptureClient);

const IID IID_IDeviceTopology = __uuidof(IDeviceTopology);
const IID IID_IAudioVolumeLevel = __uuidof(IAudioVolumeLevel);
const IID IID_IPart = __uuidof(IPart);
const IID IID_IConnector = __uuidof(IConnector);
const IID IID_IAudioEndpointVolume = __uuidof(IAudioEndpointVolume);

class MyAudioSink {
 public:
  // WAVEFORMATEX *pwfx = NULL;
  int SetFormat(WAVEFORMATEX *pwfx);

  int CopyData(SHORT *pData, UINT32 numFramesAvailable, BOOL *pbDone);
};

int MyAudioSink::SetFormat(WAVEFORMATEX *pwfx) {
  printf("wFormatTag is %x\n", pwfx->wFormatTag);
  printf("nChannels is %x\n", pwfx->nChannels);
  printf("nSamplesPerSec is %d\n", pwfx->nSamplesPerSec);
  printf("nAvgBytesPerSec is %d\n", pwfx->nAvgBytesPerSec);
  printf("wBitsPerSample is %d\n", pwfx->wBitsPerSample);

  return 0;
}

FILE *fp;

// Note: despite the parameter name, the caller passes a byte count
// (numFramesAvailable * nBlockAlign), and the data is written as raw bytes.
int MyAudioSink::CopyData(SHORT *pData, UINT32 numFramesAvailable,
                          BOOL *pbDone) {
  if (pData != NULL) {
    size_t t = sizeof(SHORT);
    for (int i = 0; i < numFramesAvailable / t; i++) {
      double dbVal = pData[i];
      pData[i] = dbVal;  // dividing by different values here can be used to control the volume
    }
    fwrite(pData, numFramesAvailable, 1, fp);
  }

  return 0;
}

/// pwfx->nSamplesPerSec = 44100;
/// Changing the mix sample rate is not supported, so the captured data has to
/// be converted with swr afterwards instead.
BOOL AdjustFormatTo16Bits(WAVEFORMATEX *pwfx) {
  BOOL bRet(FALSE);

  if (pwfx->wFormatTag == WAVE_FORMAT_IEEE_FLOAT) {
    pwfx->wFormatTag = WAVE_FORMAT_PCM;
    pwfx->wBitsPerSample = 16;
    pwfx->nBlockAlign = pwfx->nChannels * pwfx->wBitsPerSample / 8;
    pwfx->nAvgBytesPerSec = pwfx->nBlockAlign * pwfx->nSamplesPerSec;

    bRet = TRUE;
  } else if (pwfx->wFormatTag == WAVE_FORMAT_EXTENSIBLE) {
    PWAVEFORMATEXTENSIBLE pEx = reinterpret_cast<PWAVEFORMATEXTENSIBLE>(pwfx);
    if (IsEqualGUID(KSDATAFORMAT_SUBTYPE_IEEE_FLOAT, pEx->SubFormat)) {
      pEx->SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
      pEx->Samples.wValidBitsPerSample = 16;
      pwfx->wBitsPerSample = 16;
      pwfx->nBlockAlign = pwfx->nChannels * pwfx->wBitsPerSample / 8;
      pwfx->nAvgBytesPerSec = pwfx->nBlockAlign * pwfx->nSamplesPerSec;

      bRet = TRUE;
    }
  }

  return bRet;
}

typedef unsigned long long uint64_t;
static bool have_clockfreq = false;
static LARGE_INTEGER clock_freq;
static inline uint64_t get_clockfreq(void) {
  if (!have_clockfreq) QueryPerformanceFrequency(&clock_freq);
  return clock_freq.QuadPart;
}
uint64_t os_gettime_ns(void) {
  LARGE_INTEGER current_time;
  double time_val;

  QueryPerformanceCounter(&current_time);
  time_val = (double)current_time.QuadPart;
  time_val *= 1000000000.0;
  time_val /= (double)get_clockfreq();

  return (uint64_t)time_val;
}

HRESULT RecordAudioStream(MyAudioSink *pMySink) {
  HRESULT hr;
  REFERENCE_TIME hnsActualDuration;
  UINT32 bufferFrameCount;
  UINT32 numFramesAvailable;
  BYTE *pData;
  DWORD flags;
  REFERENCE_TIME hnsDefaultDevicePeriod(0);

  REFERENCE_TIME hnsRequestedDuration = REFTIMES_PER_SEC;
  IMMDeviceEnumerator *pEnumerator = NULL;
  IMMDevice *pDevice = NULL;
  IAudioClient *pAudioClient = NULL;
  IAudioCaptureClient *pCaptureClient = NULL;
  WAVEFORMATEX *pwfx = NULL;
  UINT32 packetLength = 0;
  BOOL bDone = FALSE;
  HANDLE hTimerWakeUp = NULL;
  UINT64 pos, ts;

  hr = CoCreateInstance(CLSID_MMDeviceEnumerator, NULL, CLSCTX_ALL,
                        IID_IMMDeviceEnumerator, (void **)&pEnumerator);

  EXIT_ON_ERROR(hr)

  if (IS_INPUT_DEVICE)
    hr = pEnumerator->GetDefaultAudioEndpoint(eCapture, eCommunications,
                                              &pDevice);  // input
  else
    hr = pEnumerator->GetDefaultAudioEndpoint(eRender, eConsole,
                                              &pDevice);  // output

  // wchar_t *w_id;
  // os_utf8_to_wcs_ptr(device_id.c_str(), device_id.size(), &w_id);
  // hr = pEnumerator->GetDevice(w_id, &pDevice);
  // bfree(w_id);

  EXIT_ON_ERROR(hr)

  hr = pDevice->Activate(IID_IAudioClient, CLSCTX_ALL, NULL,
                         (void **)&pAudioClient);

  EXIT_ON_ERROR(hr)

  hr = pAudioClient->GetMixFormat(&pwfx);

  EXIT_ON_ERROR(hr)

  // The GetDevicePeriod method retrieves the length of the periodic interval
  // separating successive processing passes by the audio engine on the data in
  // the endpoint buffer.
  hr = pAudioClient->GetDevicePeriod(&hnsDefaultDevicePeriod, NULL);

  EXIT_ON_ERROR(hr)

  AdjustFormatTo16Bits(pwfx);

  // Timers are normally created with the WinAPI SetTimer, but that function is
  // mostly meant for applications with a UI / message loop. Without a UI, the
  // CreateWaitableTimer and SetWaitableTimer APIs can be used instead.
  hTimerWakeUp = CreateWaitableTimer(NULL, FALSE, NULL);

  DWORD flag;
  if (IS_INPUT_DEVICE)
    flag = 0;
  else
    flag = AUDCLNT_STREAMFLAGS_LOOPBACK;

  if (IS_INPUT_DEVICE)
    hr = pAudioClient->Initialize(AUDCLNT_SHAREMODE_SHARED, flag /*0*/, 0, 0,
                                  pwfx, NULL);  // input
  else
    hr = pAudioClient->Initialize(AUDCLNT_SHAREMODE_SHARED, flag /*0*/, 0, 0,
                                  pwfx, NULL);  // output

  EXIT_ON_ERROR(hr)

  // Get the size of the allocated buffer.
  hr = pAudioClient->GetBufferSize(&bufferFrameCount);
  EXIT_ON_ERROR(hr)

  hr = pAudioClient->GetService(IID_IAudioCaptureClient,
                                (void **)&pCaptureClient);

  EXIT_ON_ERROR(hr)

  LARGE_INTEGER liFirstFire;
  liFirstFire.QuadPart =
      -hnsDefaultDevicePeriod / 2;  // negative means relative time
  LONG lTimeBetweenFires = (LONG)hnsDefaultDevicePeriod / 2 /
                           (10 * 1000);  // convert to milliseconds

  BOOL bOK = SetWaitableTimer(hTimerWakeUp, &liFirstFire, lTimeBetweenFires,
                              NULL, NULL, FALSE);

  // Notify the audio sink which format to use.
  hr = pMySink->SetFormat(pwfx);
  EXIT_ON_ERROR(hr)

  // Calculate the actual duration of the allocated buffer.
  hnsActualDuration =
      (double)REFTIMES_PER_SEC * bufferFrameCount / pwfx->nSamplesPerSec;

  /*************************************************************/
  hr = pAudioClient->Start();  // Start recording.
  EXIT_ON_ERROR(hr)
  HANDLE waitArray[1] = {/*htemp hEventStop,*/ hTimerWakeUp};

  // Each loop fills about half of the shared buffer.
  while (bDone == FALSE) {
    // Sleep for half the buffer duration.
    // Sleep(hnsActualDuration/REFTIMES_PER_MILLISEC/2); // apparently optional
    // WaitForSingleObject(hTimerWakeUp,INFINITE);
    int a = sizeof(waitArray);
    int aa = sizeof(waitArray[0]);
    WaitForMultipleObjects(sizeof(waitArray) / sizeof(waitArray[0]), waitArray,
                           FALSE, INFINITE);
    // WaitForMultipleObjects(sizeof(waitArray) / sizeof(waitArray[0]),
    //                        waitArray, FALSE, INFINITE);

    hr = pCaptureClient->GetNextPacketSize(&packetLength);
    EXIT_ON_ERROR(hr)

    while (packetLength != 0) {
      // Get the available data in the shared buffer.
      hr = pCaptureClient->GetBuffer(&pData, &numFramesAvailable, &flags, NULL,
                                     &ts);
      ts = ts * 100;
      uint64_t timestamp =
          os_gettime_ns();  // ts is the device time, timestamp is the system time
      EXIT_ON_ERROR(hr)

      // When the AUDCLNT_BUFFERFLAGS_SILENT bit (value 2) is set in flags,
      // pData is set to NULL.
      if (flags & AUDCLNT_BUFFERFLAGS_SILENT) {
        pData = NULL;  // Tell CopyData to write silence.
      }

      // Copy the available capture data to the audio sink.
      hr = pMySink->CopyData((SHORT *)pData,
                             numFramesAvailable * pwfx->nBlockAlign, &bDone);
      EXIT_ON_ERROR(hr)

      hr = pCaptureClient->ReleaseBuffer(numFramesAvailable);
      EXIT_ON_ERROR(hr)

      hr = pCaptureClient->GetNextPacketSize(&packetLength);
      EXIT_ON_ERROR(hr)
    }
  }

  hr = pAudioClient->Stop();  // Stop recording.
  EXIT_ON_ERROR(hr)

Exit:
  CoTaskMemFree(pwfx);
  SAFE_RELEASE(pEnumerator)
  SAFE_RELEASE(pDevice)
  SAFE_RELEASE(pAudioClient)
  SAFE_RELEASE(pCaptureClient)

  return hr;
}

int _tmain(int argc, _TCHAR *argv[]) {
  fopen_s(&fp, "record.pcm", "wb");
  CoInitialize(NULL);
  MyAudioSink test;

  RecordAudioStream(&test);

  return 0;
}
@@ -1,232 +0,0 @@
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavdevice/avdevice.h>
#include <libavfilter/avfilter.h>
#include <libavformat/avformat.h>
#include <libavutil/channel_layout.h>
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
#include <libavutil/samplefmt.h>
#include <libswresample/swresample.h>
#include <libswscale/swscale.h>
};

static int get_format_from_sample_fmt(const char **fmt,
                                      enum AVSampleFormat sample_fmt) {
  int i;
  struct sample_fmt_entry {
    enum AVSampleFormat sample_fmt;
    const char *fmt_be, *fmt_le;
  } sample_fmt_entries[] = {
      {AV_SAMPLE_FMT_U8, "u8", "u8"},
      {AV_SAMPLE_FMT_S16, "s16be", "s16le"},
      {AV_SAMPLE_FMT_S32, "s32be", "s32le"},
      {AV_SAMPLE_FMT_FLT, "f32be", "f32le"},
      {AV_SAMPLE_FMT_DBL, "f64be", "f64le"},
  };
  *fmt = NULL;

  for (i = 0; i < FF_ARRAY_ELEMS(sample_fmt_entries); i++) {
    struct sample_fmt_entry *entry = &sample_fmt_entries[i];
    if (sample_fmt == entry->sample_fmt) {
      *fmt = AV_NE(entry->fmt_be, entry->fmt_le);
      return 0;
    }
  }

  fprintf(stderr, "Sample format %s not supported as output format\n",
          av_get_sample_fmt_name(sample_fmt));
  return AVERROR(EINVAL);
}

/**
 * Fill dst buffer with nb_samples, generated starting from t.
 */
static void fill_samples(double *dst, int nb_samples, int nb_channels,
                         int sample_rate, double *t) {
  int i, j;
  double tincr = 1.0 / sample_rate, *dstp = dst;
  const double c = 2 * M_PI * 440.0;

  /* generate sin tone with 440Hz frequency and duplicated channels */
  for (i = 0; i < nb_samples; i++) {
    *dstp = sin(c * *t);
    for (j = 1; j < nb_channels; j++) dstp[j] = dstp[0];
    dstp += nb_channels;
    *t += tincr;
  }
}

int main(int argc, char **argv) {
  // source (input) parameters
  int64_t src_ch_layout = AV_CH_LAYOUT_MONO;
  int src_rate = 44100;
  enum AVSampleFormat src_sample_fmt = AV_SAMPLE_FMT_DBL;
  int src_nb_channels = 0;
  uint8_t **src_data = NULL;  // array of plane pointers
  int src_linesize;
  int src_nb_samples = 1024;

  // destination (output) parameters
  int64_t dst_ch_layout = AV_CH_LAYOUT_STEREO;
  int dst_rate = 48000;
  enum AVSampleFormat dst_sample_fmt = AV_SAMPLE_FMT_S16;
  int dst_nb_channels = 0;
  uint8_t **dst_data = NULL;  // array of plane pointers
  int dst_linesize;
  int dst_nb_samples;
  int max_dst_nb_samples;

  // output file: the converted PCM is saved locally and can be played back to
  // verify the result
  const char *dst_filename = NULL;
  FILE *dst_file;

  int dst_bufsize;
  const char *fmt;

  // resampler instance
  struct SwrContext *swr_ctx;

  double t;
  int ret;

  dst_filename = "res.pcm";

  dst_file = fopen(dst_filename, "wb");
  if (!dst_file) {
    fprintf(stderr, "Could not open destination file %s\n", dst_filename);
    exit(1);
  }

  // create the resampler
  /* create resampler context */
  swr_ctx = swr_alloc();
  if (!swr_ctx) {
    fprintf(stderr, "Could not allocate resampler context\n");
    ret = AVERROR(ENOMEM);
    goto end;
  }

  // configure the resampler
  /* set options */
  // input options
  av_opt_set_int(swr_ctx, "in_channel_layout", src_ch_layout, 0);
  av_opt_set_int(swr_ctx, "in_sample_rate", src_rate, 0);
  av_opt_set_sample_fmt(swr_ctx, "in_sample_fmt", src_sample_fmt, 0);
  // output options
  av_opt_set_int(swr_ctx, "out_channel_layout", dst_ch_layout, 0);
  av_opt_set_int(swr_ctx, "out_sample_rate", dst_rate, 0);
  av_opt_set_sample_fmt(swr_ctx, "out_sample_fmt", dst_sample_fmt, 0);

  // initialize the resampler
  /* initialize the resampling context */
  if ((ret = swr_init(swr_ctx)) < 0) {
    fprintf(stderr, "Failed to initialize the resampling context\n");
    goto end;
  }

  /* allocate source and destination samples buffers */
  // get the number of channels in the source layout
  src_nb_channels = av_get_channel_layout_nb_channels(src_ch_layout);
  // allocate memory for the source samples
  ret = av_samples_alloc_array_and_samples(&src_data, &src_linesize,
                                           src_nb_channels, src_nb_samples,
                                           src_sample_fmt, 0);
  if (ret < 0) {
    fprintf(stderr, "Could not allocate source samples\n");
    goto end;
  }

  /* compute the number of converted samples: buffering is avoided
   * ensuring that the output buffer will contain at least all the
   * converted input samples */
  // compute the number of output samples, e.g. 1024 samples at 44100 Hz
  // resampled to 48000 Hz need ceil(1024 * 48000 / 44100) = 1115 output samples
  max_dst_nb_samples = dst_nb_samples =
      av_rescale_rnd(src_nb_samples, dst_rate, src_rate, AV_ROUND_UP);

  /* buffer is going to be directly written to a rawaudio file, no alignment
   */
  dst_nb_channels = av_get_channel_layout_nb_channels(dst_ch_layout);
  // allocate memory for the destination samples
  ret = av_samples_alloc_array_and_samples(&dst_data, &dst_linesize,
                                           dst_nb_channels, dst_nb_samples,
                                           dst_sample_fmt, 0);
  if (ret < 0) {
    fprintf(stderr, "Could not allocate destination samples\n");
    goto end;
  }

  t = 0;
  do {
    /* generate synthetic audio */
    // generate the source data
    fill_samples((double *)src_data[0], src_nb_samples, src_nb_channels,
                 src_rate, &t);

    /* compute destination number of samples */
    int64_t delay = swr_get_delay(swr_ctx, src_rate);
    dst_nb_samples =
        av_rescale_rnd(delay + src_nb_samples, dst_rate, src_rate, AV_ROUND_UP);
    if (dst_nb_samples > max_dst_nb_samples) {
      av_freep(&dst_data[0]);
      ret = av_samples_alloc(dst_data, &dst_linesize, dst_nb_channels,
                             dst_nb_samples, dst_sample_fmt, 1);
      if (ret < 0) break;
      max_dst_nb_samples = dst_nb_samples;
    }
    // int fifo_size = swr_get_out_samples(swr_ctx,src_nb_samples);
    // printf("fifo_size:%d\n", fifo_size);
    // if(fifo_size < 1024)
    //   continue;

    /* convert to destination format */
    // ret = swr_convert(swr_ctx, dst_data, dst_nb_samples, (const
    // uint8_t **)src_data, src_nb_samples);
    ret = swr_convert(swr_ctx, dst_data, dst_nb_samples,
                      (const uint8_t **)src_data, src_nb_samples);
    if (ret < 0) {
      fprintf(stderr, "Error while converting\n");
      goto end;
    }
    dst_bufsize = av_samples_get_buffer_size(&dst_linesize, dst_nb_channels,
                                             ret, dst_sample_fmt, 1);
    if (dst_bufsize < 0) {
      fprintf(stderr, "Could not get sample buffer size\n");
      goto end;
    }
    printf("t:%f in:%d out:%d\n", t, src_nb_samples, ret);
    fwrite(dst_data[0], 1, dst_bufsize, dst_file);
  } while (t < 10);

  ret = swr_convert(swr_ctx, dst_data, dst_nb_samples, NULL, 0);
  if (ret < 0) {
    fprintf(stderr, "Error while converting\n");
    goto end;
  }
  dst_bufsize = av_samples_get_buffer_size(&dst_linesize, dst_nb_channels, ret,
                                           dst_sample_fmt, 1);
  if (dst_bufsize < 0) {
    fprintf(stderr, "Could not get sample buffer size\n");
    goto end;
  }
  printf("flush in:%d out:%d\n", 0, ret);
  fwrite(dst_data[0], 1, dst_bufsize, dst_file);

  if ((ret = get_format_from_sample_fmt(&fmt, dst_sample_fmt)) < 0) goto end;
  fprintf(stderr,
          "Resampling succeeded. Play the output file with the command:\n"
          "ffplay -f %s -channel_layout %" PRId64 " -channels %d -ar %d %s\n",
          fmt, dst_ch_layout, dst_nb_channels, dst_rate, dst_filename);

end:
  fclose(dst_file);

  if (src_data) av_freep(&src_data[0]);
  av_freep(&src_data);

  if (dst_data) av_freep(&dst_data[0]);
  av_freep(&dst_data);

  swr_free(&swr_ctx);
  return ret < 0;
}
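The example above configures the SwrContext through the old integer channel-layout options ("in_channel_layout"/"out_channel_layout"), which FFmpeg 5.1 replaced with AVChannelLayout. A hedged sketch of the equivalent setup through the newer API, reusing the same mono 44100 Hz double to stereo 48000 Hz s16 conversion; the helper name make_swr is an assumption for illustration.

// Sketch only: the same resampler configured via swr_alloc_set_opts2().
static SwrContext *make_swr(void) {
  SwrContext *ctx = NULL;
  AVChannelLayout in_layout, out_layout;
  av_channel_layout_default(&in_layout, 1);   // mono
  av_channel_layout_default(&out_layout, 2);  // stereo
  int ret = swr_alloc_set_opts2(&ctx, &out_layout, AV_SAMPLE_FMT_S16, 48000,
                                &in_layout, AV_SAMPLE_FMT_DBL, 44100, 0, NULL);
  if (ret < 0 || swr_init(ctx) < 0) {
    swr_free(&ctx);
    return NULL;
  }
  return ctx;
}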
@@ -1,96 +0,0 @@
/*
Demonstrates how to implement loopback recording.

This example simply captures data from your default playback device until you
press Enter. The output is saved to the file specified on the command line.

Loopback mode is when you record audio that is played from a given speaker. It
is only supported on WASAPI, but can be used indirectly with PulseAudio by
choosing the appropriate loopback device after enumeration.

To use loopback mode you just need to set the device type to
ma_device_type_loopback and set the capture device config properties. The output
buffer in the callback will be null whereas the input buffer will be valid.
*/
#define MINIAUDIO_IMPLEMENTATION
#include "miniaudio.h"

#include <stdio.h>
#include <stdlib.h>

FILE* fp;

void data_callback(ma_device* pDevice, void* pOutput, const void* pInput,
                   ma_uint32 frameCount) {
  // ma_encoder* pEncoder = (ma_encoder*)pDevice->pUserData;
  // MA_ASSERT(pEncoder != NULL);

  // ma_encoder_write_pcm_frames(pEncoder, pInput, frameCount, NULL);

  fwrite(pInput, frameCount * ma_get_bytes_per_frame(ma_format_s16, 1), 1, fp);

  (void)pOutput;
}

int main(int argc, char** argv) {
  ma_result result;
  ma_encoder_config encoderConfig;
  ma_encoder encoder;
  ma_device_config deviceConfig;
  ma_device device;

  fopen_s(&fp, "miniaudio.pcm", "wb");

  /* Loopback mode is currently only supported on WASAPI. */
  ma_backend backends[] = {ma_backend_wasapi};

  // if (argc < 2) {
  //   printf("No output file.\n");
  //   return -1;
  // }

  // encoderConfig =
  //     ma_encoder_config_init(ma_encoding_format_wav, ma_format_s16, 1,
  //     48000);

  // if (ma_encoder_init_file(argv[1], &encoderConfig, &encoder) != MA_SUCCESS)
  // {
  //   printf("Failed to initialize output file.\n");
  //   return -1;
  // }

  deviceConfig = ma_device_config_init(ma_device_type_loopback);
  deviceConfig.capture.pDeviceID =
      NULL; /* Use default device for this example. Set this to the ID of a
               _playback_ device if you want to capture from a specific device.
             */
  deviceConfig.capture.format = ma_format_s16;
  deviceConfig.capture.channels = 1;
  deviceConfig.sampleRate = 48000;
  deviceConfig.dataCallback = data_callback;
  deviceConfig.pUserData = nullptr;

  result = ma_device_init_ex(backends, sizeof(backends) / sizeof(backends[0]),
                             NULL, &deviceConfig, &device);
  if (result != MA_SUCCESS) {
    printf("Failed to initialize loopback device.\n");
    return -2;
  }

  result = ma_device_start(&device);
  if (result != MA_SUCCESS) {
    ma_device_uninit(&device);
    printf("Failed to start device.\n");
    return -3;
  }

  printf("Press Enter to stop recording...\n");
  getchar();

  fclose(fp);

  ma_device_uninit(&device);
  // ma_encoder_uninit(&encoder);

  return 0;
}
@@ -1,53 +0,0 @@
#include <SDL2/SDL.h>

int main(int argc, char *argv[]) {
  int ret;
  SDL_AudioSpec wanted_spec, obtained_spec;

  // Initialize SDL
  if (SDL_Init(SDL_INIT_AUDIO) < 0) {
    SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Failed to initialize SDL: %s",
                 SDL_GetError());
    return -1;
  }

  // Set audio format
  wanted_spec.freq = 44100;  // Sample rate
  wanted_spec.format =
      AUDIO_F32SYS;  // Sample format (32-bit float, system byte order)
  wanted_spec.channels = 2;     // Number of channels (stereo)
  wanted_spec.samples = 1024;   // Buffer size (in samples)
  wanted_spec.callback = NULL;  // Audio callback function (not used here)

  // Open audio device
  ret = SDL_OpenAudio(&wanted_spec, &obtained_spec);
  if (ret < 0) {
    SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                 "Failed to open audio device: %s", SDL_GetError());
    return -1;
  }

  // Start playing audio
  SDL_PauseAudio(0);

  // Write PCM data to the audio queue. The legacy device opened with
  // SDL_OpenAudio() is addressed as device ID 1 by the queueing functions.
  float *pcm_data = ...;    // PCM data buffer (float, interleaved)
  int pcm_data_size = ...;  // Size of PCM data buffer (in bytes)
  int queue_result = SDL_QueueAudio(1, pcm_data, pcm_data_size);

  // Wait until audio buffer is empty
  while (SDL_GetQueuedAudioSize(1) > 0) {
    SDL_Delay(100);
  }

  // Stop playing audio
  SDL_PauseAudio(1);

  // Close audio device
  SDL_CloseAudio();

  // Quit SDL
  SDL_Quit();

  return 0;
}
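The pcm_data / pcm_data_size placeholders above are deliberately left unfilled. As an illustration only, one way such a buffer could be produced and queued: a one-second 440 Hz stereo tone matching the 44100 Hz / AUDIO_F32SYS / 2-channel spec requested above; the helper name queue_test_tone and the tone parameters are assumptions, not part of the original example.

// Illustration only: generate and queue a test tone on the legacy device (ID 1).
#include <SDL2/SDL.h>
#include <math.h>

static void queue_test_tone(void) {
  const int sample_rate = 44100;
  const int frames = sample_rate;   // 1 second of audio
  static float samples[44100 * 2];  // interleaved stereo
  for (int i = 0; i < frames; i++) {
    float v = 0.25f * sinf(6.2831853f * 440.0f * (float)i / (float)sample_rate);
    samples[2 * i] = v;      // left channel
    samples[2 * i + 1] = v;  // right channel
  }
  SDL_QueueAudio(1, samples, sizeof(samples));
}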
@@ -1,89 +0,0 @@
#include <SDL2/SDL.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(int argc, char* argv[]) {
  if (SDL_Init(SDL_INIT_AUDIO)) {
    printf("SDL init error\n");
    return -1;
  }

  // SDL_AudioSpec
  SDL_AudioSpec wanted_spec;
  SDL_zero(wanted_spec);
  wanted_spec.freq = 48000;
  wanted_spec.format = AUDIO_S16LSB;
  wanted_spec.channels = 2;
  wanted_spec.silence = 0;
  wanted_spec.samples = 960;
  wanted_spec.callback = NULL;

  SDL_AudioDeviceID deviceID = 0;
  // open the device (SDL_OpenAudioDevice returns an ID >= 2 on success)
  if ((deviceID = SDL_OpenAudioDevice(NULL, 0, &wanted_spec, NULL,
                                      SDL_AUDIO_ALLOW_FREQUENCY_CHANGE)) < 2) {
    printf("could not open audio device: %s\n", SDL_GetError());

    // shut down all running subsystems
    SDL_Quit();
    return 0;
  }

  SDL_PauseAudioDevice(deviceID, 0);

  FILE* fp = nullptr;

  fopen_s(&fp, "ls.pcm", "rb+");
  if (fp == NULL) {
    printf("cannot open this file\n");
    return -1;
  }

  if (fp == NULL) {
    printf("error \n");
  }
  Uint32 buffer_size = 4096;
  char* buffer = (char*)malloc(buffer_size);

  while (true) {
    if (fread(buffer, 1, buffer_size, fp) != buffer_size) {
      printf("end of file\n");
      break;
    }
    SDL_QueueAudio(deviceID, buffer, buffer_size);
  }

  printf("Play...\n");

  SDL_Delay(10000);

  // Uint32 residueAudioLen = 0;

  // while (true) {
  //   residueAudioLen = SDL_GetQueuedAudioSize(deviceID);
  //   printf("%10d\n", residueAudioLen);
  //   if (residueAudioLen <= 0) break;
  //   SDL_Delay(1);
  // }

  // while (true) {
  //   printf("1 pause  2 play  3 quit\n");
  //   int flag = 0;

  //   scanf_s("%d", &flag);

  //   if (flag == 1)
  //     SDL_PauseAudioDevice(deviceID, 1);
  //   else if (flag == 2)
  //     SDL_PauseAudioDevice(deviceID, 0);
  //   else if (flag == 3)
  //     break;
  // }

  SDL_CloseAudio();
  SDL_Quit();
  fclose(fp);

  return 0;
}
@@ -1,225 +0,0 @@
#include <SDL2/SDL.h>
#include <stdio.h>
#include <stdlib.h>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavdevice/avdevice.h>
#include <libavfilter/avfilter.h>
#include <libavformat/avformat.h>
#include <libavutil/channel_layout.h>
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
#include <libavutil/samplefmt.h>
#include <libswresample/swresample.h>
#include <libswscale/swscale.h>
};

static SDL_AudioDeviceID input_dev;
static SDL_AudioDeviceID output_dev;

static Uint8 *buffer = 0;
static int in_pos = 0;
static int out_pos = 0;

int64_t src_ch_layout = AV_CH_LAYOUT_MONO;
int src_rate = 48000;
enum AVSampleFormat src_sample_fmt = AV_SAMPLE_FMT_S16;
int src_nb_channels = 0;
uint8_t **src_data = NULL;  // array of plane pointers
int src_linesize;
int src_nb_samples = 480;

// destination (output) parameters
int64_t dst_ch_layout = AV_CH_LAYOUT_MONO;
int dst_rate = 48000;
enum AVSampleFormat dst_sample_fmt = AV_SAMPLE_FMT_S16;
int dst_nb_channels = 0;
uint8_t **dst_data = NULL;  // array of plane pointers
int dst_linesize;
int dst_nb_samples;
int max_dst_nb_samples;
static unsigned char audio_buffer[960 * 3];
static int audio_len = 0;

// output file: the converted PCM is saved locally and can be played back to
// verify the result
const char *dst_filename = NULL;
FILE *dst_file;

int dst_bufsize;
const char *fmt;

// resampler instance
struct SwrContext *swr_ctx;

double t;
int ret;

char *out = "audio_old.pcm";
FILE *outfile = fopen(out, "wb+");

void cb_in(void *userdata, Uint8 *stream, int len) {
  // If len < 4, the printf below will probably segfault
  // SDL_QueueAudio(output_dev, stream, len);

  int64_t delay = swr_get_delay(swr_ctx, src_rate);
  dst_nb_samples =
      av_rescale_rnd(delay + src_nb_samples, dst_rate, src_rate, AV_ROUND_UP);
  if (dst_nb_samples > max_dst_nb_samples) {
    av_freep(&dst_data[0]);
    ret = av_samples_alloc(dst_data, &dst_linesize, dst_nb_channels,
                           dst_nb_samples, dst_sample_fmt, 1);
    if (ret < 0) return;
    max_dst_nb_samples = dst_nb_samples;
  }

  ret = swr_convert(swr_ctx, dst_data, dst_nb_samples,
                    (const uint8_t **)&stream, src_nb_samples);
  if (ret < 0) {
    fprintf(stderr, "Error while converting\n");
    return;
  }
  dst_bufsize = av_samples_get_buffer_size(&dst_linesize, dst_nb_channels, ret,
                                           dst_sample_fmt, 1);
  if (dst_bufsize < 0) {
    fprintf(stderr, "Could not get sample buffer size\n");
    return;
  }
  printf("t:%f in:%d out:%d %d\n", t, src_nb_samples, ret, len);

  memcpy(audio_buffer, dst_data[0], len);
  // SDL_QueueAudio(output_dev, dst_data[0], len);
  audio_len = len;
}

void cb_out(void *userdata, Uint8 *stream, int len) {
  // If len < 4, the printf below will probably segfault
  printf("cb_out len = %d\n", len);
  SDL_memset(stream, 0, len);
  if (audio_len == 0) return;
  len = (len > audio_len ? audio_len : len);
  SDL_MixAudioFormat(stream, audio_buffer, AUDIO_S16LSB, len,
                     SDL_MIX_MAXVOLUME);
}

int init() {
  dst_filename = "res.pcm";

  dst_file = fopen(dst_filename, "wb");
  if (!dst_file) {
    fprintf(stderr, "Could not open destination file %s\n", dst_filename);
    exit(1);
  }

  // create the resampler
  /* create resampler context */
  swr_ctx = swr_alloc();
  if (!swr_ctx) {
    fprintf(stderr, "Could not allocate resampler context\n");
    ret = AVERROR(ENOMEM);
    return -1;
  }

  // configure the resampler
  /* set options */
  // input options
  av_opt_set_int(swr_ctx, "in_channel_layout", src_ch_layout, 0);
  av_opt_set_int(swr_ctx, "in_sample_rate", src_rate, 0);
  av_opt_set_sample_fmt(swr_ctx, "in_sample_fmt", src_sample_fmt, 0);
  // output options
  av_opt_set_int(swr_ctx, "out_channel_layout", dst_ch_layout, 0);
  av_opt_set_int(swr_ctx, "out_sample_rate", dst_rate, 0);
  av_opt_set_sample_fmt(swr_ctx, "out_sample_fmt", dst_sample_fmt, 0);

  // initialize the resampler
  /* initialize the resampling context */
  if ((ret = swr_init(swr_ctx)) < 0) {
    fprintf(stderr, "Failed to initialize the resampling context\n");
    return -1;
  }

  /* allocate source and destination samples buffers */
  // get the number of channels in the source layout
  src_nb_channels = av_get_channel_layout_nb_channels(src_ch_layout);
  // allocate memory for the source samples
  ret = av_samples_alloc_array_and_samples(&src_data, &src_linesize,
                                           src_nb_channels, src_nb_samples,
                                           src_sample_fmt, 0);
  if (ret < 0) {
    fprintf(stderr, "Could not allocate source samples\n");
    return -1;
  }

  /* compute the number of converted samples: buffering is avoided
   * ensuring that the output buffer will contain at least all the
   * converted input samples */
  // compute the number of output samples
  max_dst_nb_samples = dst_nb_samples =
      av_rescale_rnd(src_nb_samples, dst_rate, src_rate, AV_ROUND_UP);

  /* buffer is going to be directly written to a rawaudio file, no alignment */
  dst_nb_channels = av_get_channel_layout_nb_channels(dst_ch_layout);
  // allocate memory for the destination samples
  ret = av_samples_alloc_array_and_samples(&dst_data, &dst_linesize,
                                           dst_nb_channels, dst_nb_samples,
                                           dst_sample_fmt, 0);
  if (ret < 0) {
    fprintf(stderr, "Could not allocate destination samples\n");
    return -1;
  }
}

int main() {
  init();

  SDL_Init(SDL_INIT_AUDIO);

  // 16Mb should be enough; the test lasts 5 seconds
  buffer = (Uint8 *)malloc(16777215);

  SDL_AudioSpec want_in, want_out, have_in, have_out;

  SDL_zero(want_in);
  want_in.freq = 48000;
  want_in.format = AUDIO_S16LSB;
  want_in.channels = 1;
  want_in.samples = 480;
  want_in.callback = cb_in;

  input_dev = SDL_OpenAudioDevice(NULL, 1, &want_in, &have_in, 0);

  printf("%d %d %d %d\n", have_in.freq, have_in.format, have_in.channels,
         have_in.samples);
  if (input_dev == 0) {
    SDL_Log("Failed to open input: %s", SDL_GetError());
    return 1;
  }

  SDL_zero(want_out);
  want_out.freq = 48000;
  want_out.format = AUDIO_S16LSB;
  want_out.channels = 1;
  want_out.samples = 480;
  want_out.callback = cb_out;

  output_dev = SDL_OpenAudioDevice(NULL, 0, &want_out, &have_out, 0);

  printf("%d %d %d %d\n", have_out.freq, have_out.format, have_out.channels,
         have_out.samples);
  if (output_dev == 0) {
    SDL_Log("Failed to open output: %s", SDL_GetError());
    return 1;
  }

  SDL_PauseAudioDevice(input_dev, 0);
  SDL_PauseAudioDevice(output_dev, 0);

  while (1) {
  }

  SDL_CloseAudioDevice(output_dev);
  SDL_CloseAudioDevice(input_dev);
  free(buffer);

  fclose(outfile);
}
@@ -1,95 +0,0 @@
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswresample/swresample.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

int main(int argc, char *argv[]) {
  int ret;
  AVFrame *frame = NULL;
  AVFrame *resampled_frame = NULL;
  AVCodecContext *codec_ctx = NULL;
  SwrContext *swr_ctx = NULL;

  // Initialize FFmpeg
  av_log_set_level(AV_LOG_INFO);
  av_register_all();

  // Allocate input frame
  frame = av_frame_alloc();
  if (!frame) {
    av_log(NULL, AV_LOG_ERROR, "Failed to allocate input frame\n");
    return -1;
  }

  // Allocate output frame for resampled data
  resampled_frame = av_frame_alloc();
  if (!resampled_frame) {
    av_log(NULL, AV_LOG_ERROR, "Failed to allocate output frame\n");
    return -1;
  }

  // Set input frame properties
  frame->format = AV_SAMPLE_FMT_FLTP;  // Input sample format (float planar)
  frame->channel_layout = AV_CH_LAYOUT_STEREO;  // Input channel layout (stereo)
  frame->sample_rate = 44100;  // Input sample rate (44100 Hz)
  frame->nb_samples = 1024;    // Number of input samples

  // Set output frame properties
  resampled_frame->format =
      AV_SAMPLE_FMT_S16;  // Output sample format (signed 16-bit)
  resampled_frame->channel_layout =
      AV_CH_LAYOUT_STEREO;  // Output channel layout (stereo)
  resampled_frame->sample_rate = 48000;  // Output sample rate (48000 Hz)
  resampled_frame->nb_samples = av_rescale_rnd(
      frame->nb_samples, resampled_frame->sample_rate, frame->sample_rate,
      AV_ROUND_UP);  // Number of output samples

  // Initialize resampler context
  swr_ctx = swr_alloc_set_opts(
      NULL, av_get_default_channel_layout(resampled_frame->channel_layout),
      av_get_default_sample_fmt(resampled_frame->format),
      resampled_frame->sample_rate,
      av_get_default_channel_layout(frame->channel_layout),
      av_get_default_sample_fmt(frame->format), frame->sample_rate, 0, NULL);
  if (!swr_ctx) {
    av_log(NULL, AV_LOG_ERROR, "Failed to allocate resampler context\n");
    return -1;
  }

  // Initialize and configure the resampler
  if ((ret = swr_init(swr_ctx)) < 0) {
    av_log(NULL, AV_LOG_ERROR, "Failed to initialize resampler context: %s\n",
           av_err2str(ret));
    return -1;
  }

  // Allocate buffer for output samples
  ret = av_samples_alloc(resampled_frame->data, resampled_frame->linesize,
                         resampled_frame->channels, resampled_frame->nb_samples,
                         resampled_frame->format, 0);
  if (ret < 0) {
    av_log(NULL, AV_LOG_ERROR, "Failed to allocate output samples buffer: %s\n",
           av_err2str(ret));
    return -1;
  }

  // Resample the input data
  ret = swr_convert(swr_ctx, resampled_frame->data, resampled_frame->nb_samples,
                    (const uint8_t **)frame->data, frame->nb_samples);
  if (ret < 0) {
    av_log(NULL, AV_LOG_ERROR, "Failed to resample input data: %s\n",
           av_err2str(ret));
    return -1;
  }

  // Cleanup and free resources
  swr_free(&swr_ctx);
  av_frame_free(&frame);
  av_frame_free(&resampled_frame);

  return 0;
}
@@ -1,205 +0,0 @@
|
|||||||
#include <SDL2/SDL.h>
|
|
||||||
#include <stdio.h>
|
|
||||||
#include <stdlib.h>
|
|
||||||
|
|
||||||
extern "C" {
|
|
||||||
#include <libavcodec/avcodec.h>
|
|
||||||
#include <libavdevice/avdevice.h>
|
|
||||||
#include <libavfilter/avfilter.h>
|
|
||||||
#include <libavformat/avformat.h>
|
|
||||||
#include <libavutil/channel_layout.h>
|
|
||||||
#include <libavutil/imgutils.h>
|
|
||||||
#include <libavutil/opt.h>
|
|
||||||
#include <libavutil/samplefmt.h>
|
|
||||||
#include <libswresample/swresample.h>
|
|
||||||
#include <libswscale/swscale.h>
|
|
||||||
};
|
|
||||||
|
|
||||||
static SDL_AudioDeviceID input_dev;
|
|
||||||
static SDL_AudioDeviceID output_dev;
|
|
||||||
|
|
||||||
static Uint8 *buffer = 0;
|
|
||||||
static int in_pos = 0;
|
|
||||||
static int out_pos = 0;
|
|
||||||
|
|
||||||
int64_t src_ch_layout = AV_CH_LAYOUT_MONO;
|
|
||||||
int src_rate = 48000;
|
|
||||||
enum AVSampleFormat src_sample_fmt = AV_SAMPLE_FMT_FLT;
|
|
||||||
int src_nb_channels = 0;
|
|
||||||
uint8_t **src_data = NULL; // <20><><EFBFBD><EFBFBD>ָ<EFBFBD><D6B8>
|
|
||||||
int src_linesize;
|
|
||||||
int src_nb_samples = 480;
|
|
||||||
|
|
||||||
// <20><><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>
|
|
||||||
int64_t dst_ch_layout = AV_CH_LAYOUT_STEREO;
|
|
||||||
int dst_rate = 48000;
|
|
||||||
enum AVSampleFormat dst_sample_fmt = AV_SAMPLE_FMT_S16;
|
|
||||||
int dst_nb_channels = 0;
|
|
||||||
uint8_t **dst_data = NULL; // <20><><EFBFBD><EFBFBD>ָ<EFBFBD><D6B8>
|
|
||||||
int dst_linesize;
|
|
||||||
int dst_nb_samples;
|
|
||||||
int max_dst_nb_samples;
|
|
||||||
|
|
||||||
// <20><><EFBFBD><EFBFBD><EFBFBD>ļ<EFBFBD>
|
|
||||||
const char *dst_filename = NULL; // <20><><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>pcm<63><6D><EFBFBD><EFBFBD><EFBFBD>أ<EFBFBD>Ȼ<EFBFBD><EFBFBD><F3B2A5B7><EFBFBD>֤
|
|
||||||
FILE *dst_file;
|
|
||||||
|
|
||||||
int dst_bufsize;
|
|
||||||
const char *fmt;
|
|
||||||
|
|
||||||
// <20>ز<EFBFBD><D8B2><EFBFBD>ʵ<EFBFBD><CAB5>
|
|
||||||
struct SwrContext *swr_ctx;
|
|
||||||
|
|
||||||
double t;
|
|
||||||
int ret;
|
|
||||||
|
|
||||||
char *out = "audio_old.pcm";
|
|
||||||
FILE *outfile = fopen(out, "wb+");
|
|
||||||
|
|
||||||
void cb_in(void *userdata, Uint8 *stream, int len) {
|
|
||||||
// If len < 4, the printf below will probably segfault
|
|
||||||
{
|
|
||||||
fwrite(stream, 1, len, outfile);
|
|
||||||
fflush(outfile);
|
|
||||||
}
|
|
||||||
{
|
|
||||||
int64_t delay = swr_get_delay(swr_ctx, src_rate);
|
|
||||||
dst_nb_samples =
|
|
||||||
av_rescale_rnd(delay + src_nb_samples, dst_rate, src_rate, AV_ROUND_UP);
|
|
||||||
if (dst_nb_samples > max_dst_nb_samples) {
|
|
||||||
av_freep(&dst_data[0]);
|
|
||||||
ret = av_samples_alloc(dst_data, &dst_linesize, dst_nb_channels,
|
|
||||||
dst_nb_samples, dst_sample_fmt, 1);
|
|
||||||
if (ret < 0) return;
|
|
||||||
max_dst_nb_samples = dst_nb_samples;
|
|
||||||
}
|
|
||||||
|
|
||||||
ret = swr_convert(swr_ctx, dst_data, dst_nb_samples,
|
|
||||||
(const uint8_t **)&stream, src_nb_samples);
|
|
||||||
if (ret < 0) {
|
|
||||||
fprintf(stderr, "Error while converting\n");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
dst_bufsize = av_samples_get_buffer_size(&dst_linesize, dst_nb_channels,
|
|
||||||
ret, dst_sample_fmt, 1);
|
|
||||||
if (dst_bufsize < 0) {
|
|
||||||
fprintf(stderr, "Could not get sample buffer size\n");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
printf("t:%f in:%d out:%d\n", t, src_nb_samples, ret);
|
|
||||||
fwrite(dst_data[0], 1, dst_bufsize, dst_file);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void cb_out(void *userdata, Uint8 *stream, int len) {
|
|
||||||
// If len < 4, the printf below will probably segfault
|
|
||||||
|
|
||||||
SDL_memcpy(buffer + out_pos, stream, len);
|
|
||||||
out_pos += len;
|
|
||||||
}
|
|
||||||
|
|
||||||
int init() {
|
|
||||||
dst_filename = "res.pcm";
|
|
||||||
|
|
||||||
dst_file = fopen(dst_filename, "wb");
|
|
||||||
if (!dst_file) {
|
|
||||||
fprintf(stderr, "Could not open destination file %s\n", dst_filename);
|
|
||||||
exit(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
// <20><><EFBFBD><EFBFBD><EFBFBD>ز<EFBFBD><D8B2><EFBFBD><EFBFBD><EFBFBD>
|
|
||||||
/* create resampler context */
|
|
||||||
swr_ctx = swr_alloc();
|
|
||||||
if (!swr_ctx) {
|
|
||||||
fprintf(stderr, "Could not allocate resampler context\n");
|
|
||||||
ret = AVERROR(ENOMEM);
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
// <20><><EFBFBD><EFBFBD><EFBFBD>ز<EFBFBD><D8B2><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>
|
|
||||||
/* set options */
|
|
||||||
// <20><><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>
|
|
||||||
av_opt_set_int(swr_ctx, "in_channel_layout", src_ch_layout, 0);
|
|
||||||
av_opt_set_int(swr_ctx, "in_sample_rate", src_rate, 0);
|
|
||||||
av_opt_set_sample_fmt(swr_ctx, "in_sample_fmt", src_sample_fmt, 0);
|
|
||||||
// <20><><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>
|
|
||||||
av_opt_set_int(swr_ctx, "out_channel_layout", dst_ch_layout, 0);
|
|
||||||
av_opt_set_int(swr_ctx, "out_sample_rate", dst_rate, 0);
|
|
||||||
av_opt_set_sample_fmt(swr_ctx, "out_sample_fmt", dst_sample_fmt, 0);
|
|
||||||
|
|
||||||
// <20><>ʼ<EFBFBD><CABC><EFBFBD>ز<EFBFBD><D8B2><EFBFBD>
|
|
||||||
/* initialize the resampling context */
|
|
||||||
if ((ret = swr_init(swr_ctx)) < 0) {
|
|
||||||
fprintf(stderr, "Failed to initialize the resampling context\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* allocate source and destination samples buffers */
|
|
||||||
// <20><><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>Դ<EFBFBD><D4B4>ͨ<EFBFBD><CDA8><EFBFBD><EFBFBD><EFBFBD><EFBFBD>
|
|
||||||
src_nb_channels = av_get_channel_layout_nb_channels(src_ch_layout);
|
|
||||||
// <20><><EFBFBD><EFBFBD><EFBFBD><EFBFBD>Դ<EFBFBD><D4B4><EFBFBD><EFBFBD><EFBFBD>ڴ<EFBFBD><DAB4>ռ<EFBFBD>
|
|
||||||
ret = av_samples_alloc_array_and_samples(&src_data, &src_linesize,
|
|
||||||
src_nb_channels, src_nb_samples,
|
|
||||||
src_sample_fmt, 0);
|
|
||||||
if (ret < 0) {
|
|
||||||
fprintf(stderr, "Could not allocate source samples\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* compute the number of converted samples: buffering is avoided
|
|
||||||
* ensuring that the output buffer will contain at least all the
|
|
||||||
* converted input samples */
|
|
||||||
// <20><><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>
|
|
||||||
max_dst_nb_samples = dst_nb_samples =
|
|
||||||
av_rescale_rnd(src_nb_samples, dst_rate, src_rate, AV_ROUND_UP);
|
|
||||||
|
|
||||||
/* buffer is going to be directly written to a rawaudio file, no alignment */
|
|
||||||
dst_nb_channels = av_get_channel_layout_nb_channels(dst_ch_layout);
|
|
||||||
// <20><><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>ڴ<EFBFBD>
|
|
||||||
ret = av_samples_alloc_array_and_samples(&dst_data, &dst_linesize,
|
|
||||||
dst_nb_channels, dst_nb_samples,
|
|
||||||
dst_sample_fmt, 0);
|
|
||||||
if (ret < 0) {
|
|
||||||
fprintf(stderr, "Could not allocate destination samples\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
int main() {
|
|
||||||
init();
|
|
||||||
|
|
||||||
SDL_Init(SDL_INIT_AUDIO);
|
|
||||||
|
|
||||||
// 16Mb should be enough; the test lasts 5 seconds
|
|
||||||
buffer = (Uint8 *)malloc(16777215);
|
|
||||||
|
|
||||||
SDL_AudioSpec want_in, want_out, have_in, have_out;
|
|
||||||
|
|
||||||
SDL_zero(want_in);
|
|
||||||
want_in.freq = 48000;
|
|
||||||
want_in.format = AUDIO_F32LSB;
|
|
||||||
want_in.channels = 2;
|
|
||||||
want_in.samples = 960;
|
|
||||||
want_in.callback = cb_in;
|
|
||||||
|
|
||||||
input_dev = SDL_OpenAudioDevice(NULL, 1, &want_in, &have_in,
|
|
||||||
SDL_AUDIO_ALLOW_ANY_CHANGE);
|
|
||||||
|
|
||||||
printf("%d %d %d %d\n", have_in.freq, have_in.format, have_in.channels,
|
|
||||||
have_in.samples);
|
|
||||||
if (input_dev == 0) {
|
|
||||||
SDL_Log("Failed to open input: %s", SDL_GetError());
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
SDL_PauseAudioDevice(input_dev, 0);
|
|
||||||
SDL_PauseAudioDevice(output_dev, 0);
|
|
||||||
|
|
||||||
SDL_Delay(5000);
|
|
||||||
|
|
||||||
SDL_CloseAudioDevice(output_dev);
|
|
||||||
SDL_CloseAudioDevice(input_dev);
|
|
||||||
free(buffer);
|
|
||||||
|
|
||||||
fclose(outfile);
|
|
||||||
}
|
|
||||||
@@ -1,123 +0,0 @@
#define __STDC_CONSTANT_MACROS
extern "C" {
#include <libavdevice/avdevice.h>
#include <libavformat/avformat.h>
#include <libavutil/log.h>
#include <libswresample/swresample.h>
}

#include <windows.h>

#include <memory>
#include <string>
#include <vector>

#pragma comment(lib, "avutil.lib")
#pragma comment(lib, "avdevice.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avcodec.lib")

#pragma comment(lib, "Winmm.lib")

using std::shared_ptr;
using std::string;
using std::vector;

void capture_audio() {
  // Enumerate audio input devices with the Windows API (FFmpeg does not
  // provide an API for listing audio capture devices).
  int nDeviceNum = waveInGetNumDevs();
  vector<string> vecDeviceName;
  for (int i = 0; i < nDeviceNum; ++i) {
    WAVEINCAPS wic;
    waveInGetDevCaps(i, &wic, sizeof(wic));

    // Convert the device name to UTF-8
    int nSize = WideCharToMultiByte(CP_UTF8, 0, wic.szPname,
                                    wcslen(wic.szPname), NULL, 0, NULL, NULL);
    shared_ptr<char> spDeviceName(new char[nSize + 1]);
    memset(spDeviceName.get(), 0, nSize + 1);
    WideCharToMultiByte(CP_UTF8, 0, wic.szPname, wcslen(wic.szPname),
                        spDeviceName.get(), nSize, NULL, NULL);
    vecDeviceName.push_back(spDeviceName.get());
    av_log(NULL, AV_LOG_DEBUG, "audio input device : %s \n",
           spDeviceName.get());
  }
  if (vecDeviceName.size() <= 0) {
    av_log(NULL, AV_LOG_ERROR, "not find audio input device.\n");
    return;
  }
  string sDeviceName = "audio=" + vecDeviceName[0];  // use the first audio device

  // ffmpeg
  avdevice_register_all();  // register all input/output devices
  AVInputFormat* ifmt =
      (AVInputFormat*)av_find_input_format("dshow");  // use the dshow capture format
  if (ifmt == NULL) {
    av_log(NULL, AV_LOG_ERROR, "av_find_input_format for dshow fail.\n");
    return;
  }

  AVFormatContext* fmt_ctx = NULL;  // format context
  int ret = avformat_open_input(&fmt_ctx, sDeviceName.c_str(), ifmt,
                                NULL);  // open the audio device
  if (ret != 0) {
    av_log(NULL, AV_LOG_ERROR, "avformat_open_input fail. return %d.\n", ret);
    return;
  }

  AVPacket pkt;

  int64_t src_rate = 44100;
  int64_t dst_rate = 48000;
  SwrContext* swr_ctx = swr_alloc();

  uint8_t** dst_data = NULL;
  int dst_linesize = 0;

  // input parameters
  av_opt_set_int(swr_ctx, "in_channel_layout", AV_CH_LAYOUT_MONO, 0);
  av_opt_set_int(swr_ctx, "in_sample_rate", src_rate, 0);
  av_opt_set_sample_fmt(swr_ctx, "in_sample_fmt", AV_SAMPLE_FMT_S16, 0);
  // output parameters
  av_opt_set_int(swr_ctx, "out_channel_layout", AV_CH_LAYOUT_STEREO, 0);
  av_opt_set_int(swr_ctx, "out_sample_rate", dst_rate, 0);
  av_opt_set_sample_fmt(swr_ctx, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0);
  // initialize the SwrContext
  swr_init(swr_ctx);

  FILE* fp = fopen("dst.pcm", "wb");
  int count = 0;
  while (count++ < 10) {
    ret = av_read_frame(fmt_ctx, &pkt);
    if (ret != 0) {
      av_log(NULL, AV_LOG_ERROR, "av_read_frame fail, return %d .\n", ret);
      break;
    }

    int out_samples_per_channel =
        (int)av_rescale_rnd(1024, dst_rate, src_rate, AV_ROUND_UP);
    int out_buffer_size = av_samples_get_buffer_size(
        NULL, 1, out_samples_per_channel, AV_SAMPLE_FMT_S16, 0);
    // uint8_t* out_buffer = (uint8_t*)av_malloc(out_buffer_size);
    ret = av_samples_alloc_array_and_samples(
        &dst_data, &dst_linesize, 2, out_buffer_size, AV_SAMPLE_FMT_S16, 0);

    // resample
    swr_convert(swr_ctx, dst_data, out_samples_per_channel,
                (const uint8_t**)&pkt.data, 1024);

    fwrite(dst_data[1], 1, out_buffer_size, fp);
    av_packet_unref(&pkt);  // release the packet's memory to avoid leaks
  }
  fflush(fp);  // flush the file I/O buffer
  fclose(fp);

  avformat_close_input(&fmt_ctx);
}

int main(int argc, char** argv) {
  av_log_set_level(AV_LOG_DEBUG);  // set the ffmpeg log level
  capture_audio();

  Sleep(1);
}
@@ -1,150 +0,0 @@
|
|||||||
#include <X11/Xlib.h>
|
|
||||||
#include <fcntl.h>
|
|
||||||
#include <linux/uinput.h>
|
|
||||||
#include <stdio.h>
|
|
||||||
#include <stdlib.h>
|
|
||||||
#include <unistd.h>
|
|
||||||
|
|
||||||
#include <cstring>
|
|
||||||
#include <iostream>
|
|
||||||
#include <sstream>
|
|
||||||
|
|
||||||
using namespace std;
|
|
||||||
|
|
||||||
int fd;
|
|
||||||
Display *dpy;
|
|
||||||
|
|
||||||
void initMouse();
|
|
||||||
void destroyMouse();
|
|
||||||
void mouseLeftClick();
|
|
||||||
void mouseRightClick();
|
|
||||||
void mouseGetPosition(int &x, int &y);
|
|
||||||
void mouseMove(int xdelta, int ydelta);
|
|
||||||
|
|
||||||
void initMouse() {
|
|
||||||
fd = open("/dev/uinput", O_WRONLY | O_NONBLOCK);
|
|
||||||
ioctl(fd, UI_SET_EVBIT, EV_KEY);
|
|
||||||
ioctl(fd, UI_SET_KEYBIT, BTN_RIGHT);
|
|
||||||
ioctl(fd, UI_SET_KEYBIT, BTN_LEFT);
|
|
||||||
ioctl(fd, UI_SET_EVBIT, EV_ABS);
|
|
||||||
ioctl(fd, UI_SET_ABSBIT, ABS_X);
|
|
||||||
ioctl(fd, UI_SET_ABSBIT, ABS_Y);
|
|
||||||
ioctl(fd, UI_SET_EVBIT, EV_REL);
|
|
||||||
|
|
||||||
struct uinput_user_dev uidev;
|
|
||||||
memset(&uidev, 0, sizeof(uidev));
|
|
||||||
snprintf(uidev.name, UINPUT_MAX_NAME_SIZE, "VirtualMouse");
|
|
||||||
uidev.id.bustype = BUS_USB;
|
|
||||||
uidev.id.version = 1;
|
|
||||||
uidev.id.vendor = 0x1;
|
|
||||||
uidev.id.product = 0x1;
|
|
||||||
uidev.absmin[ABS_X] = 0;
|
|
||||||
uidev.absmax[ABS_X] = 3200;
|
|
||||||
uidev.absmin[ABS_Y] = 0;
|
|
||||||
uidev.absmax[ABS_Y] = 900;
|
|
||||||
|
|
||||||
write(fd, &uidev, sizeof(uidev));
|
|
||||||
ioctl(fd, UI_DEV_CREATE);
|
|
||||||
|
|
||||||
sleep(2);
|
|
||||||
}
|
|
||||||
|
|
||||||
void mouseLeftClick() {
|
|
||||||
struct input_event ev_click, ev_sync;
|
|
||||||
memset(&ev_click, 0, sizeof(ev_click));
|
|
||||||
memset(&ev_sync, 0, sizeof(ev_sync));
|
|
||||||
|
|
||||||
ev_click.type = EV_KEY;
|
|
||||||
ev_click.code = BTN_LEFT;
|
|
||||||
ev_click.value = 1;
|
|
||||||
|
|
||||||
// write left click event
|
|
||||||
write(fd, &ev_click, sizeof(ev_click));
|
|
||||||
|
|
||||||
// sync left click event
|
|
||||||
ev_sync.type = EV_SYN;
|
|
||||||
write(fd, &ev_sync, sizeof(ev_sync));
|
|
||||||
}
|
|
||||||
|
|
||||||
void mouseRightClick() {
|
|
||||||
struct input_event ev_click, ev_sync;
|
|
||||||
memset(&ev_click, 0, sizeof(ev_click));
|
|
||||||
memset(&ev_sync, 0, sizeof(ev_sync));
|
|
||||||
|
|
||||||
ev_click.type = EV_KEY;
|
|
||||||
ev_click.code = BTN_RIGHT;
|
|
||||||
ev_click.value = 1;
|
|
||||||
|
|
||||||
// write right click event
|
|
||||||
write(fd, &ev_click, sizeof(ev_click));
|
|
||||||
|
|
||||||
// sync right click event
|
|
||||||
ev_sync.type = EV_SYN;
|
|
||||||
write(fd, &ev_sync, sizeof(ev_sync));
|
|
||||||
}
|
|
||||||
|
|
||||||
void mouseSetPosition(int x, int y) {
|
|
||||||
struct input_event ev[2], ev_sync;
|
|
||||||
memset(ev, 0, sizeof(ev));
|
|
||||||
memset(&ev_sync, 0, sizeof(ev_sync));
|
|
||||||
|
|
||||||
ev[0].type = EV_ABS;
|
|
||||||
ev[0].code = ABS_X;
|
|
||||||
ev[0].value = x;
|
|
||||||
ev[1].type = EV_ABS;
|
|
||||||
ev[1].code = ABS_Y;
|
|
||||||
ev[1].value = y;
|
|
||||||
|
|
||||||
int res_w = write(fd, ev, sizeof(ev));
|
|
||||||
|
|
||||||
std::cout << "res w : " << res_w << "\n";
|
|
||||||
|
|
||||||
ev_sync.type = EV_SYN;
|
|
||||||
ev_sync.value = 0;
|
|
||||||
ev_sync.code = 0;
|
|
||||||
int res_ev_sync = write(fd, &ev_sync, sizeof(ev_sync));
|
|
||||||
|
|
||||||
std::cout << "res syn : " << res_ev_sync << "\n";
|
|
||||||
}
|
|
||||||
|
|
||||||
void initDisplay() { dpy = XOpenDisplay(NULL); }
|
|
||||||
|
|
||||||
void destroyMouse() { ioctl(fd, UI_DEV_DESTROY); }
|
|
||||||
|
|
||||||
void mouseMove(int xdelta, int ydelta) {
|
|
||||||
int xx, yy;
|
|
||||||
mouseGetPosition(xx, yy);
|
|
||||||
|
|
||||||
mouseSetPosition(xx + xdelta, yy + ydelta);
|
|
||||||
}
|
|
||||||
|
|
||||||
void mouseGetPosition(int &x, int &y) {
|
|
||||||
Window root, child;
|
|
||||||
int rootX, rootY, winX, winY;
|
|
||||||
unsigned int mask;
|
|
||||||
|
|
||||||
XQueryPointer(dpy, DefaultRootWindow(dpy), &root, &child, &rootX, &rootY,
|
|
||||||
&winX, &winY, &mask);
|
|
||||||
|
|
||||||
std::cout << "root x : " << rootX << "\n";
|
|
||||||
std::cout << "root y : " << rootY << "\n";
|
|
||||||
|
|
||||||
x = rootX;
|
|
||||||
y = rootY;
|
|
||||||
}
|
|
||||||
|
|
||||||
int main() {
|
|
||||||
initMouse();
|
|
||||||
initDisplay();
|
|
||||||
|
|
||||||
int tempx, tempy;
|
|
||||||
|
|
||||||
for (int i = 0; i < 5; ++i) {
|
|
||||||
mouseMove(100, 100);
|
|
||||||
sleep(1);
|
|
||||||
std::cout << "i : " << i << "\n";
|
|
||||||
}
|
|
||||||
|
|
||||||
destroyMouse();
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
@@ -1,309 +0,0 @@
|
|||||||
#include <stdio.h>
|
|
||||||
|
|
||||||
#define __STDC_CONSTANT_MACROS
|
|
||||||
|
|
||||||
#ifdef _WIN32
|
|
||||||
// Windows
|
|
||||||
extern "C" {
|
|
||||||
#include <libavcodec/avcodec.h>
|
|
||||||
#include <libavdevice/avdevice.h>
|
|
||||||
#include <libavformat/avformat.h>
|
|
||||||
#include <libavutil/imgutils.h>
|
|
||||||
#include <libswscale/swscale.h>
|
|
||||||
|
|
||||||
#include "SDL2/SDL.h"
|
|
||||||
};
|
|
||||||
#else
|
|
||||||
// Linux...
|
|
||||||
#ifdef __cplusplus
|
|
||||||
extern "C" {
|
|
||||||
#endif
|
|
||||||
#include <SDL2/SDL.h>
|
|
||||||
#include <libavcodec/avcodec.h>
|
|
||||||
#include <libavdevice/avdevice.h>
|
|
||||||
#include <libavformat/avformat.h>
|
|
||||||
#include <libavutil/imgutils.h>
|
|
||||||
#include <libswscale/swscale.h>
|
|
||||||
#ifdef __cplusplus
|
|
||||||
};
|
|
||||||
#endif
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#include <chrono>
|
|
||||||
|
|
||||||
// Output YUV420P
|
|
||||||
#define OUTPUT_YUV420P 0
|
|
||||||
//'1' Use Dshow
|
|
||||||
//'0' Use GDIgrab
|
|
||||||
#define USE_DSHOW 0
|
|
||||||
|
|
||||||
// Refresh Event
|
|
||||||
#define SFM_REFRESH_EVENT (SDL_USEREVENT + 1)
|
|
||||||
|
|
||||||
#define SFM_BREAK_EVENT (SDL_USEREVENT + 2)
|
|
||||||
|
|
||||||
#define NV12_BUFFER_SIZE 1280 * 720 * 3 / 2
|
|
||||||
|
|
||||||
int thread_exit = 0;
|
|
||||||
SDL_Texture *sdlTexture = nullptr;
|
|
||||||
SDL_Renderer *sdlRenderer = nullptr;
|
|
||||||
SDL_Rect sdlRect;
|
|
||||||
unsigned char nv12_buffer[NV12_BUFFER_SIZE];
|
|
||||||
std::chrono::_V2::system_clock::time_point last_frame_time;
|
|
||||||
const int pixel_w = 1280, pixel_h = 720;
|
|
||||||
int screen_w = 1280, screen_h = 720;
|
|
||||||
bool done = false;
|
|
||||||
|
|
||||||
int YUV420ToNV12FFmpeg(unsigned char *src_buffer, int width, int height,
|
|
||||||
unsigned char *des_buffer) {
|
|
||||||
AVFrame *Input_pFrame = av_frame_alloc();
|
|
||||||
AVFrame *Output_pFrame = av_frame_alloc();
|
|
||||||
struct SwsContext *img_convert_ctx = sws_getContext(
|
|
||||||
width, height, AV_PIX_FMT_NV12, width, height, AV_PIX_FMT_YUV420P,
|
|
||||||
SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
|
|
||||||
|
|
||||||
av_image_fill_arrays(Input_pFrame->data, Input_pFrame->linesize, src_buffer,
|
|
||||||
AV_PIX_FMT_NV12, width, height, 1);
|
|
||||||
av_image_fill_arrays(Output_pFrame->data, Output_pFrame->linesize, des_buffer,
|
|
||||||
AV_PIX_FMT_YUV420P, width, height, 1);
|
|
||||||
|
|
||||||
sws_scale(img_convert_ctx, (uint8_t const **)Input_pFrame->data,
|
|
||||||
Input_pFrame->linesize, 0, height, Output_pFrame->data,
|
|
||||||
Output_pFrame->linesize);
|
|
||||||
|
|
||||||
if (Input_pFrame) av_free(Input_pFrame);
|
|
||||||
if (Output_pFrame) av_free(Output_pFrame);
|
|
||||||
if (img_convert_ctx) sws_freeContext(img_convert_ctx);
|
|
||||||
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
int sfp_refresh_thread(void *opaque) {
|
|
||||||
thread_exit = 0;
|
|
||||||
while (!thread_exit) {
|
|
||||||
SDL_Event event;
|
|
||||||
event.type = SFM_REFRESH_EVENT;
|
|
||||||
SDL_PushEvent(&event);
|
|
||||||
SDL_Delay(30);
|
|
||||||
printf("sfp_refresh_thread\n");
|
|
||||||
}
|
|
||||||
thread_exit = 0;
|
|
||||||
// Break
|
|
||||||
SDL_Event event;
|
|
||||||
event.type = SFM_BREAK_EVENT;
|
|
||||||
SDL_PushEvent(&event);
|
|
||||||
printf("exit sfp_refresh_thread\n");
|
|
||||||
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
int main(int argc, char *argv[]) {
|
|
||||||
AVFormatContext *pFormatCtx;
|
|
||||||
int i, videoindex;
|
|
||||||
AVCodecContext *pCodecCtx;
|
|
||||||
AVCodec *pCodec;
|
|
||||||
AVCodecParameters *pCodecParam;
|
|
||||||
|
|
||||||
// avformat_network_init();
|
|
||||||
pFormatCtx = avformat_alloc_context();
|
|
||||||
|
|
||||||
// Open File
|
|
||||||
char filepath[] = "out.h264";
|
|
||||||
// avformat_open_input(&pFormatCtx, filepath, NULL, NULL);
|
|
||||||
|
|
||||||
// Register Device
|
|
||||||
avdevice_register_all();
|
|
||||||
// Windows
|
|
||||||
|
|
||||||
// Linux
|
|
||||||
AVDictionary *options = NULL;
|
|
||||||
// Set some options
|
|
||||||
// grabbing frame rate
|
|
||||||
av_dict_set(&options, "framerate", "30", 0);
|
|
||||||
// Make the grabbed area follow the mouse
|
|
||||||
// av_dict_set(&options, "follow_mouse", "centered", 0);
|
|
||||||
// Video frame size. The default is to capture the full screen
|
|
||||||
av_dict_set(&options, "video_size", "1280x720", 0);
|
|
||||||
AVInputFormat *ifmt = (AVInputFormat *)av_find_input_format("x11grab");
|
|
||||||
if (!ifmt) {
|
|
||||||
printf("Couldn't find_input_format\n");
|
|
||||||
}
|
|
||||||
|
|
||||||
// Grab at position 10,20
|
|
||||||
if (avformat_open_input(&pFormatCtx, ":0.0", ifmt, &options) != 0) {
|
|
||||||
printf("Couldn't open input stream.\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
|
|
||||||
printf("Couldn't find stream information.\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
videoindex = -1;
|
|
||||||
for (i = 0; i < pFormatCtx->nb_streams; i++)
|
|
||||||
if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
|
|
||||||
videoindex = i;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
if (videoindex == -1) {
|
|
||||||
printf("Didn't find a video stream.\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
pCodecParam = pFormatCtx->streams[videoindex]->codecpar;
|
|
||||||
|
|
||||||
pCodecCtx = avcodec_alloc_context3(NULL);
|
|
||||||
avcodec_parameters_to_context(pCodecCtx, pCodecParam);
|
|
||||||
|
|
||||||
pCodec = const_cast<AVCodec *>(avcodec_find_decoder(pCodecCtx->codec_id));
|
|
||||||
if (pCodec == NULL) {
|
|
||||||
printf("Codec not found.\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
|
|
||||||
printf("Could not open codec.\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
AVFrame *pFrame, *pFrameYUV, *pFrameNV12;
|
|
||||||
pFrame = av_frame_alloc();
|
|
||||||
pFrameYUV = av_frame_alloc();
|
|
||||||
pFrameNV12 = av_frame_alloc();
|
|
||||||
// unsigned char *out_buffer=(unsigned char
|
|
||||||
// *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width,
|
|
||||||
// pCodecCtx->height)); avpicture_fill((AVPicture *)pFrameYUV, out_buffer,
|
|
||||||
// AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
|
|
||||||
// SDL----------------------------
|
|
||||||
if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
|
|
||||||
printf("Could not initialize SDL - %s\n", SDL_GetError());
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
// const SDL_VideoInfo *vi = SDL_GetVideoInfo();
|
|
||||||
// Half of the Desktop's width and height.
|
|
||||||
screen_w = 1280;
|
|
||||||
screen_h = 720;
|
|
||||||
// SDL_Surface *screen;
|
|
||||||
// screen = SDL_SetVideoMode(screen_w, screen_h, 0, 0);
|
|
||||||
SDL_Window *screen;
|
|
||||||
screen = SDL_CreateWindow("Linux Capture", SDL_WINDOWPOS_UNDEFINED,
|
|
||||||
SDL_WINDOWPOS_UNDEFINED, screen_w, screen_h,
|
|
||||||
SDL_WINDOW_RESIZABLE);
|
|
||||||
|
|
||||||
if (!screen) {
|
|
||||||
printf("SDL: could not set video mode - exiting:%s\n", SDL_GetError());
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
// SDL_Overlay *bmp;
|
|
||||||
// bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height,
|
|
||||||
// SDL_YV12_OVERLAY, screen);
|
|
||||||
|
|
||||||
sdlRenderer = SDL_CreateRenderer(screen, -1, SDL_RENDERER_ACCELERATED);
|
|
||||||
|
|
||||||
Uint32 pixformat = 0;
|
|
||||||
pixformat = SDL_PIXELFORMAT_NV12;
|
|
||||||
|
|
||||||
SDL_Texture *sdlTexture = nullptr;
|
|
||||||
sdlTexture = SDL_CreateTexture(sdlRenderer, pixformat,
|
|
||||||
SDL_TEXTUREACCESS_STREAMING, pixel_w, pixel_h);
|
|
||||||
|
|
||||||
SDL_Rect rect;
|
|
||||||
rect.x = 0;
|
|
||||||
rect.y = 0;
|
|
||||||
rect.w = screen_w;
|
|
||||||
rect.h = screen_h;
|
|
||||||
// SDL End------------------------
|
|
||||||
int ret, got_picture;
|
|
||||||
|
|
||||||
AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
|
|
||||||
|
|
||||||
struct SwsContext *img_convert_ctx;
|
|
||||||
img_convert_ctx = sws_getContext(
|
|
||||||
pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width,
|
|
||||||
pCodecCtx->height, AV_PIX_FMT_NV12, SWS_BICUBIC, NULL, NULL, NULL);
|
|
||||||
//------------------------------
|
|
||||||
SDL_Thread *video_tid = SDL_CreateThread(sfp_refresh_thread, NULL, NULL);
|
|
||||||
//
|
|
||||||
// SDL_WM_SetCaption("Simplest FFmpeg Grab Desktop", NULL);
|
|
||||||
// Event Loop
|
|
||||||
SDL_Event event;
|
|
||||||
|
|
||||||
last_frame_time = std::chrono::steady_clock::now();
|
|
||||||
|
|
||||||
for (;;) {
|
|
||||||
// Wait
|
|
||||||
SDL_WaitEvent(&event);
|
|
||||||
|
|
||||||
if (1) {
|
|
||||||
//------------------------------
|
|
||||||
if (av_read_frame(pFormatCtx, packet) >= 0) {
|
|
||||||
if (packet->stream_index == videoindex) {
|
|
||||||
avcodec_send_packet(pCodecCtx, packet);
|
|
||||||
av_packet_unref(packet);
|
|
||||||
got_picture = avcodec_receive_frame(pCodecCtx, pFrame);
|
|
||||||
// ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture,
|
|
||||||
// packet);
|
|
||||||
if (ret < 0) {
|
|
||||||
printf("Decode Error.\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
printf("xxxxxxxxxxxxxxxxxxx\n");
|
|
||||||
if (!got_picture) {
|
|
||||||
auto now_time = std::chrono::steady_clock::now();
|
|
||||||
std::chrono::duration<double> duration = now_time - last_frame_time;
|
|
||||||
auto tc = duration.count() * 1000;
|
|
||||||
printf("duration: %f\n", tc);
|
|
||||||
last_frame_time = now_time;
|
|
||||||
|
|
||||||
av_image_fill_arrays(pFrameNV12->data, pFrameNV12->linesize,
|
|
||||||
nv12_buffer, AV_PIX_FMT_NV12, pFrame->width,
|
|
||||||
pFrame->height, 1);
|
|
||||||
|
|
||||||
sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 0,
|
|
||||||
pFrame->height, pFrameNV12->data, pFrameNV12->linesize);
|
|
||||||
|
|
||||||
SDL_UpdateTexture(sdlTexture, NULL, nv12_buffer, pixel_w);
|
|
||||||
|
|
||||||
// FIX: If window is resize
|
|
||||||
sdlRect.x = 0;
|
|
||||||
sdlRect.y = 0;
|
|
||||||
sdlRect.w = screen_w;
|
|
||||||
sdlRect.h = screen_h;
|
|
||||||
|
|
||||||
SDL_RenderClear(sdlRenderer);
|
|
||||||
SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, &sdlRect);
|
|
||||||
SDL_RenderPresent(sdlRenderer);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// av_free_packet(packet);
|
|
||||||
} else {
|
|
||||||
// Exit Thread
|
|
||||||
// thread_exit = 1;
|
|
||||||
// printf("No frame read\n");
|
|
||||||
}
|
|
||||||
} else if (event.type == SDL_QUIT) {
|
|
||||||
printf("SDL_QUIT\n");
|
|
||||||
thread_exit = 1;
|
|
||||||
} else if (event.type == SFM_BREAK_EVENT) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
sws_freeContext(img_convert_ctx);
|
|
||||||
|
|
||||||
#if OUTPUT_YUV420P
|
|
||||||
fclose(fp_yuv);
|
|
||||||
#endif
|
|
||||||
|
|
||||||
SDL_Quit();
|
|
||||||
|
|
||||||
// av_free(out_buffer);
|
|
||||||
av_frame_free(&pFrameNV12);
|
|
||||||
av_free(pFrameYUV);
|
|
||||||
avcodec_close(pCodecCtx);
|
|
||||||
avformat_close_input(&pFormatCtx);
|
|
||||||
|
|
||||||
getchar();
|
|
||||||
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
@@ -1,286 +0,0 @@
|
|||||||
#include <stdio.h>
|
|
||||||
|
|
||||||
#define __STDC_CONSTANT_MACROS
|
|
||||||
|
|
||||||
#ifdef _WIN32
|
|
||||||
// Windows
|
|
||||||
extern "C" {
|
|
||||||
#include "SDL/SDL.h"
|
|
||||||
#include "libavcodec/avcodec.h"
|
|
||||||
#include "libavdevice/avdevice.h"
|
|
||||||
#include "libavformat/avformat.h"
|
|
||||||
#include "libswscale/swscale.h"
|
|
||||||
};
|
|
||||||
#else
|
|
||||||
// Linux...
|
|
||||||
#ifdef __cplusplus
|
|
||||||
extern "C" {
|
|
||||||
#endif
|
|
||||||
#include <SDL2/SDL.h>
|
|
||||||
#include <libavcodec/avcodec.h>
|
|
||||||
#include <libavdevice/avdevice.h>
|
|
||||||
#include <libavformat/avformat.h>
|
|
||||||
#include <libswscale/swscale.h>
|
|
||||||
#ifdef __cplusplus
|
|
||||||
};
|
|
||||||
#endif
|
|
||||||
#endif
|
|
||||||
|
|
||||||
// Output YUV420P
|
|
||||||
#define OUTPUT_YUV420P 0
|
|
||||||
//'1' Use Dshow
|
|
||||||
//'0' Use GDIgrab
|
|
||||||
#define USE_DSHOW 0
|
|
||||||
|
|
||||||
// Refresh Event
|
|
||||||
#define SFM_REFRESH_EVENT (SDL_USEREVENT + 1)
|
|
||||||
|
|
||||||
#define SFM_BREAK_EVENT (SDL_USEREVENT + 2)
|
|
||||||
|
|
||||||
int thread_exit = 0;
|
|
||||||
|
|
||||||
int sfp_refresh_thread(void *opaque) {
|
|
||||||
thread_exit = 0;
|
|
||||||
while (!thread_exit) {
|
|
||||||
SDL_Event event;
|
|
||||||
event.type = SFM_REFRESH_EVENT;
|
|
||||||
SDL_PushEvent(&event);
|
|
||||||
SDL_Delay(40);
|
|
||||||
}
|
|
||||||
thread_exit = 0;
|
|
||||||
// Break
|
|
||||||
SDL_Event event;
|
|
||||||
event.type = SFM_BREAK_EVENT;
|
|
||||||
SDL_PushEvent(&event);
|
|
||||||
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Show AVFoundation Device
|
|
||||||
void show_avfoundation_device() {
|
|
||||||
AVFormatContext *pFormatCtx = avformat_alloc_context();
|
|
||||||
AVDictionary *options = NULL;
|
|
||||||
av_dict_set(&options, "list_devices", "true", 0);
|
|
||||||
AVInputFormat *iformat =
|
|
||||||
(AVInputFormat *)av_find_input_format("avfoundation");
|
|
||||||
printf("==AVFoundation Device Info===\n");
|
|
||||||
avformat_open_input(&pFormatCtx, "", iformat, &options);
|
|
||||||
printf("=============================\n");
|
|
||||||
}
|
|
||||||
|
|
||||||
int main(int argc, char *argv[]) {
|
|
||||||
AVFormatContext *pFormatCtx;
|
|
||||||
int i, videoindex;
|
|
||||||
AVCodecContext *pCodecCtx;
|
|
||||||
AVCodec *pCodec;
|
|
||||||
|
|
||||||
avformat_network_init();
|
|
||||||
pFormatCtx = avformat_alloc_context();
|
|
||||||
|
|
||||||
// Open File
|
|
||||||
// char filepath[]="src01_480x272_22.h265";
|
|
||||||
// avformat_open_input(&pFormatCtx,filepath,NULL,NULL)
|
|
||||||
|
|
||||||
// Register Device
|
|
||||||
avdevice_register_all();
|
|
||||||
// Windows
|
|
||||||
#ifdef _WIN32
|
|
||||||
#if USE_DSHOW
|
|
||||||
// Use dshow
|
|
||||||
//
|
|
||||||
// Need to Install screen-capture-recorder
|
|
||||||
// screen-capture-recorder
|
|
||||||
// Website: http://sourceforge.net/projects/screencapturer/
|
|
||||||
//
|
|
||||||
AVInputFormat *ifmt = av_find_input_format("dshow");
|
|
||||||
if (avformat_open_input(&pFormatCtx, "video=screen-capture-recorder", ifmt,
|
|
||||||
NULL) != 0) {
|
|
||||||
printf("Couldn't open input stream.\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
#else
|
|
||||||
// Use gdigrab
|
|
||||||
AVDictionary *options = NULL;
|
|
||||||
// Set some options
|
|
||||||
// grabbing frame rate
|
|
||||||
// av_dict_set(&options,"framerate","5",0);
|
|
||||||
// The distance from the left edge of the screen or desktop
|
|
||||||
// av_dict_set(&options,"offset_x","20",0);
|
|
||||||
// The distance from the top edge of the screen or desktop
|
|
||||||
// av_dict_set(&options,"offset_y","40",0);
|
|
||||||
// Video frame size. The default is to capture the full screen
|
|
||||||
// av_dict_set(&options,"video_size","640x480",0);
|
|
||||||
AVInputFormat *ifmt = av_find_input_format("gdigrab");
|
|
||||||
if (avformat_open_input(&pFormatCtx, "desktop", ifmt, &options) != 0) {
|
|
||||||
printf("Couldn't open input stream.\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
#endif
|
|
||||||
#elif defined linux
|
|
||||||
// Linux
|
|
||||||
AVDictionary *options = NULL;
|
|
||||||
// Set some options
|
|
||||||
// grabbing frame rate
|
|
||||||
// av_dict_set(&options,"framerate","5",0);
|
|
||||||
// Make the grabbed area follow the mouse
|
|
||||||
// av_dict_set(&options,"follow_mouse","centered",0);
|
|
||||||
// Video frame size. The default is to capture the full screen
|
|
||||||
// av_dict_set(&options,"video_size","640x480",0);
|
|
||||||
AVInputFormat *ifmt = av_find_input_format("x11grab");
|
|
||||||
// Grab at position 10,20
|
|
||||||
if (avformat_open_input(&pFormatCtx, ":0.0+10,20", ifmt, &options) != 0) {
|
|
||||||
printf("Couldn't open input stream.\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
#else
|
|
||||||
show_avfoundation_device();
|
|
||||||
// Mac
|
|
||||||
AVInputFormat *ifmt = (AVInputFormat *)av_find_input_format("avfoundation");
|
|
||||||
// Avfoundation
|
|
||||||
//[video]:[audio]
|
|
||||||
if (avformat_open_input(&pFormatCtx, "1", ifmt, NULL) != 0) {
|
|
||||||
printf("Couldn't open input stream.\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
|
|
||||||
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
|
|
||||||
printf("Couldn't find stream information.\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
videoindex = -1;
|
|
||||||
for (i = 0; i < pFormatCtx->nb_streams; i++)
|
|
||||||
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
|
|
||||||
videoindex = i;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
if (videoindex == -1) {
|
|
||||||
printf("Didn't find a video stream.\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
pCodecCtx = pFormatCtx->streams[videoindex]->codec;
|
|
||||||
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
|
|
||||||
if (pCodec == NULL) {
|
|
||||||
printf("Codec not found.\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
|
|
||||||
printf("Could not open codec.\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
AVFrame *pFrame, *pFrameYUV;
|
|
||||||
pFrame = av_frame_alloc();
|
|
||||||
pFrameYUV = av_frame_alloc();
|
|
||||||
// unsigned char *out_buffer=(unsigned char
|
|
||||||
// *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width,
|
|
||||||
// pCodecCtx->height)); avpicture_fill((AVPicture *)pFrameYUV, out_buffer,
|
|
||||||
// AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
|
|
||||||
// SDL----------------------------
|
|
||||||
if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
|
|
||||||
printf("Could not initialize SDL - %s\n", SDL_GetError());
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
int screen_w = 640, screen_h = 360;
|
|
||||||
const SDL_VideoInfo *vi = SDL_GetVideoInfo();
|
|
||||||
// Half of the Desktop's width and height.
|
|
||||||
screen_w = vi->current_w / 2;
|
|
||||||
screen_h = vi->current_h / 2;
|
|
||||||
SDL_Surface *screen;
|
|
||||||
screen = SDL_SetVideoMode(screen_w, screen_h, 0, 0);
|
|
||||||
|
|
||||||
if (!screen) {
|
|
||||||
printf("SDL: could not set video mode - exiting:%s\n", SDL_GetError());
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
SDL_Overlay *bmp;
|
|
||||||
bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height,
|
|
||||||
SDL_YV12_OVERLAY, screen);
|
|
||||||
SDL_Rect rect;
|
|
||||||
rect.x = 0;
|
|
||||||
rect.y = 0;
|
|
||||||
rect.w = screen_w;
|
|
||||||
rect.h = screen_h;
|
|
||||||
// SDL End------------------------
|
|
||||||
int ret, got_picture;
|
|
||||||
|
|
||||||
AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
|
|
||||||
|
|
||||||
#if OUTPUT_YUV420P
|
|
||||||
FILE *fp_yuv = fopen("output.yuv", "wb+");
|
|
||||||
#endif
|
|
||||||
|
|
||||||
struct SwsContext *img_convert_ctx;
|
|
||||||
img_convert_ctx = sws_getContext(
|
|
||||||
pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width,
|
|
||||||
pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
|
|
||||||
//------------------------------
|
|
||||||
SDL_Thread *video_tid = SDL_CreateThread(sfp_refresh_thread, NULL);
|
|
||||||
//
|
|
||||||
SDL_WM_SetCaption("Simplest FFmpeg Grab Desktop", NULL);
|
|
||||||
// Event Loop
|
|
||||||
SDL_Event event;
|
|
||||||
|
|
||||||
for (;;) {
|
|
||||||
// Wait
|
|
||||||
SDL_WaitEvent(&event);
|
|
||||||
if (event.type == SFM_REFRESH_EVENT) {
|
|
||||||
//------------------------------
|
|
||||||
if (av_read_frame(pFormatCtx, packet) >= 0) {
|
|
||||||
if (packet->stream_index == videoindex) {
|
|
||||||
ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
|
|
||||||
if (ret < 0) {
|
|
||||||
printf("Decode Error.\n");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
if (got_picture) {
|
|
||||||
SDL_LockYUVOverlay(bmp);
|
|
||||||
pFrameYUV->data[0] = bmp->pixels[0];
|
|
||||||
pFrameYUV->data[1] = bmp->pixels[2];
|
|
||||||
pFrameYUV->data[2] = bmp->pixels[1];
|
|
||||||
pFrameYUV->linesize[0] = bmp->pitches[0];
|
|
||||||
pFrameYUV->linesize[1] = bmp->pitches[2];
|
|
||||||
pFrameYUV->linesize[2] = bmp->pitches[1];
|
|
||||||
sws_scale(img_convert_ctx,
|
|
||||||
(const unsigned char *const *)pFrame->data,
|
|
||||||
pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data,
|
|
||||||
pFrameYUV->linesize);
|
|
||||||
|
|
||||||
#if OUTPUT_YUV420P
|
|
||||||
int y_size = pCodecCtx->width * pCodecCtx->height;
|
|
||||||
fwrite(pFrameYUV->data[0], 1, y_size, fp_yuv); // Y
|
|
||||||
fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_yuv); // U
|
|
||||||
fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_yuv); // V
|
|
||||||
#endif
|
|
||||||
SDL_UnlockYUVOverlay(bmp);
|
|
||||||
|
|
||||||
SDL_DisplayYUVOverlay(bmp, &rect);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
av_free_packet(packet);
|
|
||||||
} else {
|
|
||||||
// Exit Thread
|
|
||||||
thread_exit = 1;
|
|
||||||
}
|
|
||||||
} else if (event.type == SDL_QUIT) {
|
|
||||||
thread_exit = 1;
|
|
||||||
} else if (event.type == SFM_BREAK_EVENT) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
sws_freeContext(img_convert_ctx);
|
|
||||||
|
|
||||||
#if OUTPUT_YUV420P
|
|
||||||
fclose(fp_yuv);
|
|
||||||
#endif
|
|
||||||
|
|
||||||
SDL_Quit();
|
|
||||||
|
|
||||||
// av_free(out_buffer);
|
|
||||||
av_free(pFrameYUV);
|
|
||||||
avcodec_close(pCodecCtx);
|
|
||||||
avformat_close_input(&pFormatCtx);
|
|
||||||
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
208  thirdparty/ffmpeg/xmake.lua (vendored)
@@ -1,208 +0,0 @@
|
|||||||
package("ffmpeg")
|
|
||||||
|
|
||||||
set_homepage("https://www.ffmpeg.org")
|
|
||||||
set_description("A collection of libraries to process multimedia content such as audio, video, subtitles and related metadata.")
|
|
||||||
set_license("LGPL-3.0")
|
|
||||||
|
|
||||||
if is_plat("windows", "mingw") then
|
|
||||||
add_urls("https://www.gyan.dev/ffmpeg/builds/packages/ffmpeg-$(version)-full_build-shared.7z")
|
|
||||||
add_versions("5.1.2", "d9eb97b72d7cfdae4d0f7eaea59ccffb8c364d67d88018ea715d5e2e193f00e9")
|
|
||||||
add_versions("5.0.1", "ded28435b6f04b74f5ef5a6a13761233bce9e8e9f8ecb0eabe936fd36a778b0c")
|
|
||||||
|
|
||||||
add_configs("shared", {description = "Download shared binaries.", default = true, type = "boolean", readonly = true})
|
|
||||||
add_configs("vs_runtime", {description = "Set vs compiler runtime.", default = "MD", readonly = true})
|
|
||||||
else
|
|
||||||
add_urls("https://ffmpeg.org/releases/ffmpeg-$(version).tar.bz2", {alias = "home"})
|
|
||||||
add_urls("https://github.com/FFmpeg/FFmpeg/archive/n$(version).zip", {alias = "github"})
|
|
||||||
add_urls("https://git.ffmpeg.org/ffmpeg.git", "https://github.com/FFmpeg/FFmpeg.git", {alias = "git"})
|
|
||||||
add_versions("home:5.1.2", "39a0bcc8d98549f16c570624678246a6ac736c066cebdb409f9502e915b22f2b")
|
|
||||||
add_versions("home:5.1.1", "cd0e16f903421266d5ccddedf7b83b9e5754aef4b9f7a7f06ce9e4c802f0545b")
|
|
||||||
add_versions("home:5.0.1", "28df33d400a1c1c1b20d07a99197809a3b88ef765f5f07dc1ff067fac64c59d6")
|
|
||||||
add_versions("home:4.0.2", "346c51735f42c37e0712e0b3d2f6476c86ac15863e4445d9e823fe396420d056")
|
|
||||||
add_versions("github:5.1.2", "0c99f3609160f40946e2531804175eea16416320c4b6365ad075e390600539db")
|
|
||||||
add_versions("github:5.1.1", "a886fcc94792764c27c88ebe71dffbe5f0d37df8f06f01efac4833ac080c11bf")
|
|
||||||
add_versions("github:5.0.1", "f9c2e06cafa4381df8d5c9c9e14d85d9afcbc10c516c6a206f821997cc7f6440")
|
|
||||||
add_versions("github:4.0.2", "4df1ef0bf73b7148caea1270539ef7bd06607e0ea8aa2fbf1bb34062a097f026")
|
|
||||||
add_versions("git:5.1.2", "n5.1.2")
|
|
||||||
add_versions("git:5.1.1", "n5.1.1")
|
|
||||||
add_versions("git:5.0.1", "n5.0.1")
|
|
||||||
add_versions("git:4.0.2", "n4.0.2")
|
|
||||||
|
|
||||||
add_configs("gpl", {description = "Enable GPL code", default = false, type = "boolean"})
|
|
||||||
add_configs("ffprobe", {description = "Enable ffprobe program.", default = false, type = "boolean"})
|
|
||||||
add_configs("ffmpeg", {description = "Enable ffmpeg program.", default = true, type = "boolean"})
|
|
||||||
add_configs("ffplay", {description = "Enable ffplay program.", default = false, type = "boolean"})
|
|
||||||
add_configs("zlib", {description = "Enable zlib compression library.", default = false, type = "boolean"})
|
|
||||||
add_configs("lzma", {description = "Enable liblzma compression library.", default = false, type = "boolean"})
|
|
||||||
add_configs("bzlib", {description = "Enable bzlib compression library.", default = false, type = "boolean"})
|
|
||||||
add_configs("libx264", {description = "Enable libx264 decoder.", default = false, type = "boolean"})
|
|
||||||
add_configs("libx265", {description = "Enable libx265 decoder.", default = false, type = "boolean"})
|
|
||||||
add_configs("iconv", {description = "Enable libiconv library.", default = false, type = "boolean"})
|
|
||||||
add_configs("vaapi", {description = "Enable vaapi library.", default = false, type = "boolean"})
|
|
||||||
add_configs("vdpau", {description = "Enable vdpau library.", default = false, type = "boolean"})
|
|
||||||
add_configs("hardcoded-tables", {description = "Enable hardcoded tables.", default = true, type = "boolean"})
|
|
||||||
add_configs("asm", {description = "Enable asm", default = false, type = "boolean"})
|
|
||||||
add_configs("libopenh264", {description = "Enable libopenh264", default = false, type = "boolean"})
|
|
||||||
add_configs("libxcb", {description = "Enable libxcb", default = true, type = "boolean"})
|
|
||||||
end
|
|
||||||
|
|
||||||
add_links("avfilter", "avdevice", "avformat", "avcodec", "swscale", "swresample", "avutil", "postproc")
|
|
||||||
if is_plat("macosx") then
|
|
||||||
add_frameworks("CoreFoundation", "Foundation", "CoreVideo", "CoreMedia", "AudioToolbox", "VideoToolbox", "Security")
|
|
||||||
elseif is_plat("linux") then
|
|
||||||
-- add_syslinks("pthread", "openh264")
|
|
||||||
add_syslinks("pthread")
|
|
||||||
end
|
|
||||||
|
|
||||||
if is_plat("linux", "macosx") then
|
|
||||||
add_deps("yasm")
|
|
||||||
end
|
|
||||||
|
|
||||||
if on_fetch then
|
|
||||||
on_fetch("mingw", "linux", "macosx", function (package, opt)
|
|
||||||
import("lib.detect.find_tool")
|
|
||||||
if opt.system then
|
|
||||||
local result
|
|
||||||
for _, name in ipairs({"libavcodec", "libavdevice", "libavfilter", "libavformat", "libavutil", "libpostproc", "libswresample", "libswscale"}) do
|
|
||||||
local pkginfo = package:find_package("pkgconfig::" .. name, opt)
|
|
||||||
if pkginfo then
|
|
||||||
pkginfo.version = nil
|
|
||||||
if not result then
|
|
||||||
result = pkginfo
|
|
||||||
else
|
|
||||||
result = result .. pkginfo
|
|
||||||
end
|
|
||||||
else
|
|
||||||
return
|
|
||||||
end
|
|
||||||
end
|
|
||||||
local ffmpeg = find_tool("ffmpeg", {check = "-help", version = true, command = "-version", parse = "%d+%.?%d+%.?%d+", force = true})
|
|
||||||
if ffmpeg then
|
|
||||||
result.version = ffmpeg.version
|
|
||||||
end
|
|
||||||
return result
|
|
||||||
end
|
|
||||||
end)
|
|
||||||
end
|
|
||||||
|
|
||||||
on_load("linux", "macosx", "android", function (package)
|
|
||||||
local configdeps = {zlib = "zlib",
|
|
||||||
bzlib = "bzip2",
|
|
||||||
lzma = "xz",
|
|
||||||
libx264 = "x264",
|
|
||||||
libx265 = "x265",
|
|
||||||
iconv = "libiconv"}
|
|
||||||
for name, dep in pairs(configdeps) do
|
|
||||||
if package:config(name) then
|
|
||||||
package:add("deps", dep)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
-- https://www.ffmpeg.org/platform.html#toc-Advanced-linking-configuration
|
|
||||||
if package:config("pic") ~= false and not package:is_plat("macosx") then
|
|
||||||
package:add("shflags", "-Wl,-Bsymbolic")
|
|
||||||
package:add("ldflags", "-Wl,-Bsymbolic")
|
|
||||||
end
|
|
||||||
if not package:config("gpl") then
|
|
||||||
package:set("license", "LGPL-3.0")
|
|
||||||
end
|
|
||||||
end)
|
|
||||||
|
|
||||||
on_install("windows|x64", "mingw|x86_64", function (package)
|
|
||||||
os.cp("bin", package:installdir())
|
|
||||||
os.cp("include", package:installdir())
|
|
||||||
os.cp("lib", package:installdir())
|
|
||||||
package:addenv("PATH", "bin")
|
|
||||||
end)
|
|
||||||
|
|
||||||
on_install("linux", "macosx", "android@linux,macosx", function (package)
|
|
||||||
local configs = {"--enable-version3",
|
|
||||||
"--disable-doc"}
|
|
||||||
if package:config("gpl") then
|
|
||||||
table.insert(configs, "--enable-gpl")
|
|
||||||
end
|
|
||||||
if package:is_plat("macosx") and macos.version():ge("10.8") then
|
|
||||||
table.insert(configs, "--enable-videotoolbox")
|
|
||||||
end
|
|
||||||
for name, enabled in pairs(package:configs()) do
|
|
||||||
if not package:extraconf("configs", name, "builtin") then
|
|
||||||
if enabled then
|
|
||||||
table.insert(configs, "--enable-" .. name)
|
|
||||||
else
|
|
||||||
table.insert(configs, "--disable-" .. name)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
if package:config("shared") then
|
|
||||||
table.insert(configs, "--enable-shared")
|
|
||||||
table.insert(configs, "--disable-static")
|
|
||||||
else
|
|
||||||
table.insert(configs, "--enable-static")
|
|
||||||
table.insert(configs, "--disable-shared")
|
|
||||||
end
|
|
||||||
if package:debug() then
|
|
||||||
table.insert(configs, "--enable-debug")
|
|
||||||
else
|
|
||||||
table.insert(configs, "--disable-debug")
|
|
||||||
end
|
|
||||||
if package:is_plat("android") then
|
|
||||||
import("core.base.option")
|
|
||||||
import("core.tool.toolchain")
|
|
||||||
local ndk = toolchain.load("ndk", {plat = package:plat(), arch = package:arch()})
|
|
||||||
local bin = ndk:bindir()
|
|
||||||
local ndk_sdkver = ndk:config("ndk_sdkver")
|
|
||||||
local arch, cpu, triple, cross_prefix
|
|
||||||
if package:is_arch("arm64-v8a") then
|
|
||||||
arch = "arm64"
|
|
||||||
cpu = "armv8-a"
|
|
||||||
triple = "aarch64-linux-android"
|
|
||||||
cross_prefix = path.join(bin, "aarch64-linux-android-")
|
|
||||||
elseif package:arch():startswith("arm") then
|
|
||||||
arch = "arm"
|
|
||||||
cpu = "armv7-a"
|
|
||||||
triple = "armv7a-linux-androideabi"
|
|
||||||
cross_prefix = path.join(bin, "arm-linux-androideabi-")
|
|
||||||
else
|
|
||||||
raise("unknown arch(%s) for android!", package:arch())
|
|
||||||
end
|
|
||||||
local sysroot = path.join(path.directory(bin), "sysroot")
|
|
||||||
local cflags = table.join(table.wrap(package:config("cxflags")), table.wrap(package:config("cflags")), table.wrap(get_config("cxflags")), get_config("cflags"))
|
|
||||||
local cxxflags = table.join(table.wrap(package:config("cxflags")), table.wrap(package:config("cxxflags")), table.wrap(get_config("cxflags")), get_config("cxxflags"))
|
|
||||||
assert(os.isdir(sysroot), "we do not support old version ndk!")
|
|
||||||
if package:is_arch("arm64-v8a") then
|
|
||||||
table.insert(cflags, "-mfpu=neon")
|
|
||||||
table.insert(cflags, "-mfloat-abi=soft")
|
|
||||||
else
|
|
||||||
table.insert(cflags, "-mfpu=neon")
|
|
||||||
table.insert(cflags, "-mfloat-abi=soft")
|
|
||||||
end
|
|
||||||
table.insert(configs, "--enable-neon")
|
|
||||||
table.insert(configs, "--enable-asm")
|
|
||||||
table.insert(configs, "--enable-jni")
|
|
||||||
table.insert(configs, "--target-os=android")
|
|
||||||
table.insert(configs, "--enable-cross-compile")
|
|
||||||
table.insert(configs, "--disable-avdevice")
|
|
||||||
table.insert(configs, "--arch=" .. arch)
|
|
||||||
table.insert(configs, "--cpu=" .. cpu)
|
|
||||||
table.insert(configs, "--cc=" .. path.join(bin, triple .. ndk_sdkver .. "-clang"))
|
|
||||||
table.insert(configs, "--cxx=" .. path.join(bin, triple .. ndk_sdkver .. "-clang++"))
|
|
||||||
table.insert(configs, "--extra-cflags=" .. table.concat(cflags, ' '))
|
|
||||||
table.insert(configs, "--extra-cxxflags=" .. table.concat(cxxflags, ' '))
|
|
||||||
table.insert(configs, "--sysroot=" .. sysroot)
|
|
||||||
table.insert(configs, "--cross-prefix=" .. cross_prefix)
|
|
||||||
table.insert(configs, "--prefix=" .. package:installdir())
|
|
||||||
os.vrunv("./configure", configs)
|
|
||||||
local argv = {"-j4"}
|
|
||||||
if option.get("verbose") then
|
|
||||||
table.insert(argv, "V=1")
|
|
||||||
end
|
|
||||||
os.vrunv("make", argv)
|
|
||||||
os.vrun("make install")
|
|
||||||
else
|
|
||||||
import("package.tools.autoconf").install(package, configs)
|
|
||||||
end
|
|
||||||
package:addenv("PATH", "bin")
|
|
||||||
end)
|
|
||||||
|
|
||||||
on_test(function (package)
|
|
||||||
assert(package:has_cfuncs("avformat_open_input", {includes = "libavformat/avformat.h"}))
|
|
||||||
end)
|
|
||||||
8  thirdparty/xmake.lua (vendored)
@@ -1,7 +1 @@
 includes("projectx")
-if is_plat("windows") then
-elseif is_plat("linux") then
-    includes("ffmpeg")
-elseif is_plat("macosx") then
-    includes("ffmpeg")
-end
96
xmake.lua
96
xmake.lua
@@ -33,13 +33,11 @@ elseif is_os("linux") then
|
|||||||
add_links("SDL2", "cuda", "nvidia-encode", "nvcuvid", "X11", "Xtst")
|
add_links("SDL2", "cuda", "nvidia-encode", "nvcuvid", "X11", "Xtst")
|
||||||
add_cxflags("-Wno-unused-variable")
|
add_cxflags("-Wno-unused-variable")
|
||||||
elseif is_os("macosx") then
|
elseif is_os("macosx") then
|
||||||
add_requires("ffmpeg 5.1.2", {system = false})
|
|
||||||
add_requires("libxcb", {system = false})
|
|
||||||
add_packages("libxcb")
|
|
||||||
add_links("SDL2", "SDL2main")
|
add_links("SDL2", "SDL2main")
|
||||||
add_ldflags("-Wl,-ld_classic")
|
add_ldflags("-Wl,-ld_classic")
|
||||||
add_cxflags("-Wno-unused-variable")
|
add_cxflags("-Wno-unused-variable")
|
||||||
add_frameworks("OpenGL", "IOSurface", "ScreenCaptureKit")
|
add_frameworks("OpenGL", "IOSurface", "ScreenCaptureKit", "AVFoundation",
|
||||||
|
"CoreMedia", "CoreVideo")
|
||||||
end
|
end
|
||||||
|
|
||||||
add_packages("spdlog", "imgui")
|
add_packages("spdlog", "imgui")
|
||||||
@@ -67,14 +65,11 @@ target("screen_capturer")
|
|||||||
add_files("src/screen_capturer/windows/*.cpp")
|
add_files("src/screen_capturer/windows/*.cpp")
|
||||||
add_includedirs("src/screen_capturer/windows", {public = true})
|
add_includedirs("src/screen_capturer/windows", {public = true})
|
||||||
elseif is_os("macosx") then
|
elseif is_os("macosx") then
|
||||||
add_packages("ffmpeg")
|
add_files("src/screen_capturer/macosx/*.cpp",
|
||||||
add_files("src/screen_capturer/macosx/avfoundation/*.cpp",
|
"src/screen_capturer/macosx/*.mm")
|
||||||
"src/screen_capturer/macosx/screen_capturer_kit/*.cpp",
|
add_includedirs("src/screen_capturer/macosx", {public = true})
|
||||||
"src/screen_capturer/macosx/screen_capturer_kit/*.mm")
|
|
||||||
add_includedirs("src/screen_capturer/macosx/avfoundation",
|
|
||||||
"src/screen_capturer/macosx/screen_capturer_kit", {public = true})
|
|
||||||
elseif is_os("linux") then
|
elseif is_os("linux") then
|
||||||
add_packages("libyuv", "ffmpeg")
|
add_packages("libyuv")
|
||||||
add_files("src/screen_capturer/linux/*.cpp")
|
add_files("src/screen_capturer/linux/*.cpp")
|
||||||
add_includedirs("src/screen_capturer/linux", {public = true})
|
add_includedirs("src/screen_capturer/linux", {public = true})
|
||||||
end
|
end
|
||||||
@@ -129,12 +124,8 @@ target("localization")
|
|||||||
target("single_window")
|
target("single_window")
|
||||||
set_kind("object")
|
set_kind("object")
|
||||||
add_packages("libyuv", "openssl3")
|
add_packages("libyuv", "openssl3")
|
||||||
add_deps("rd_log", "common", "localization", "config_center", "projectx", "screen_capturer", "speaker_capturer", "device_controller")
|
add_deps("rd_log", "common", "localization", "config_center", "projectx",
|
||||||
if is_os("macosx") then
|
"screen_capturer", "speaker_capturer", "device_controller")
|
||||||
add_packages("ffmpeg")
|
|
||||||
elseif is_os("linux") then
|
|
||||||
add_packages("ffmpeg")
|
|
||||||
end
|
|
||||||
add_files("src/single_window/*.cpp")
|
add_files("src/single_window/*.cpp")
|
||||||
add_includedirs("src/single_window", {public = true})
|
add_includedirs("src/single_window", {public = true})
|
||||||
add_includedirs("fonts", {public = true})
|
add_includedirs("fonts", {public = true})
|
||||||
@@ -145,77 +136,8 @@ target("remote_desk")
     if is_os("windows") then
         add_files("icon/app.rc")
     elseif is_os("macosx") then
-        add_packages("ffmpeg")
         -- add_rules("xcode.application")
         -- add_files("Info.plist")
     elseif is_os("linux") then
-        add_packages("ffmpeg")
     end
     add_files("src/gui/main.cpp")
-
--- target("miniaudio_capture")
--- set_kind("binary")
--- add_packages("miniaudio")
--- if is_os("windows") then
--- add_files("test/audio_capture/miniaudio.cpp")
--- end
-
--- target("screen_capturer")
--- set_kind("binary")
--- add_packages("sdl2", "imgui", "ffmpeg", "openh264")
--- add_files("test/screen_capturer/linux_capture.cpp")
--- add_ldflags("-lavformat", "-lavdevice", "-lavfilter", "-lavcodec",
--- "-lswscale", "-lavutil", "-lswresample",
--- "-lasound", "-lxcb-shape", "-lxcb-xfixes", "-lsndio", "-lxcb",
--- "-lxcb-shm", "-lXext", "-lX11", "-lXv", "-lpthread", "-lSDL2", "-lopenh264",
--- "-ldl", {force = true})
-
--- target("screen_capturer")
--- set_kind("binary")
--- add_packages("sdl2", "imgui", "ffmpeg", "openh264")
--- add_files("test/screen_capturer/mac_capture.cpp")
--- add_ldflags("-lavformat", "-lavdevice", "-lavfilter", "-lavcodec",
--- "-lswscale", "-lavutil", "-lswresample",
--- "-lasound", "-lxcb-shape", "-lxcb-xfixes", "-lsndio", "-lxcb",
--- "-lxcb-shm", "-lXext", "-lX11", "-lXv", "-lpthread", "-lSDL2", "-lopenh264",
--- "-ldl", {force = true})
-
--- target("audio_capture")
--- set_kind("binary")
--- add_packages("ffmpeg")
--- add_files("test/audio_capture/sdl2_audio_capture.cpp")
--- add_includedirs("test/audio_capture")
--- add_ldflags("-lavformat", "-lavdevice", "-lavfilter", "-lavcodec",
--- "-lswscale", "-lavutil", "-lswresample",
--- "-lasound", "-lxcb-shape", "-lxcb-xfixes", "-lsndio", "-lxcb",
--- "-lxcb-shm", "-lXext", "-lX11", "-lXv", "-lpthread", "-lSDL2", "-lopenh264",
--- "-ldl", {force = true})
--- add_links("Shlwapi", "Strmiids", "Vfw32", "Secur32", "Mfuuid")
-
--- target("play_audio")
--- set_kind("binary")
--- add_packages("ffmpeg")
--- add_files("test/audio_capture/play_loopback.cpp")
--- add_includedirs("test/audio_capture")
--- add_ldflags("-lavformat", "-lavdevice", "-lavfilter", "-lavcodec",
--- "-lswscale", "-lavutil", "-lswresample",
--- "-lasound", "-lxcb-shape", "-lxcb-xfixes", "-lsndio", "-lxcb",
--- "-lxcb-shm", "-lXext", "-lX11", "-lXv", "-lpthread", "-lSDL2", "-lopenh264",
--- "-ldl", {force = true})
--- add_links("Shlwapi", "Strmiids", "Vfw32", "Secur32", "Mfuuid")
-
--- target("audio_capture")
--- set_kind("binary")
--- add_packages("libopus")
--- add_files("test/audio_capture/sdl2_audio_capture.cpp")
--- add_includedirs("test/audio_capture")
--- add_ldflags("-lavformat", "-lavdevice", "-lavfilter", "-lavcodec",
--- "-lswscale", "-lavutil", "-lswresample",
--- "-lasound", "-lxcb-shape", "-lxcb-xfixes", "-lsndio", "-lxcb",
--- "-lxcb-shm", "-lXext", "-lX11", "-lXv", "-lpthread", "-lSDL2", "-lopenh264",
--- "-ldl", {force = true})
-
--- target("mouse_control")
--- set_kind("binary")
--- add_files("test/linux_mouse_control/mouse_control.cpp")
--- add_includedirs("test/linux_mouse_control")
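The remote_desk target drops its per-OS add_packages("ffmpeg") lines in the same way, and the long block of commented-out test targets at the end of the file is deleted outright. What remains of the platform-specific part is roughly the following; the indentation and the elided lines before the if block are assumptions:

    target("remote_desk")
        ...
        if is_os("windows") then
            add_files("icon/app.rc")
        elseif is_os("macosx") then
            -- add_rules("xcode.application")
            -- add_files("Info.plist")
        elseif is_os("linux") then
        end
        add_files("src/gui/main.cpp")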