Mirror of https://github.com/kunkundi/crossdesk.git (synced 2025-10-27 04:35:34 +08:00)
Implementation for openh264 codec
@@ -591,8 +591,7 @@ int main() {
 
       if (tc >= 0) {
         SendData(peer_server, DATA_TYPE::VIDEO,
-                 (const char *)nv12_buffer, NV12_BUFFER_SIZE);
-        std::cout << "Send" << std::endl;
+                 (const char *)data, NV12_BUFFER_SIZE);
         last_frame_time_ = now_time;
       }
     });
@@ -5,14 +5,7 @@
 #define NV12_BUFFER_SIZE 1280 * 720 * 3 / 2
 unsigned char nv12_buffer_[NV12_BUFFER_SIZE];
 
-FILE *file = nullptr;
-
-ScreenCaptureX11::ScreenCaptureX11() {
-  file = fopen("nv12.yuv", "w+b");
-  if (!file) {
-    printf("Fail to open nv12.yuv\n");
-  }
-}
+ScreenCaptureX11::ScreenCaptureX11() {}
 
 ScreenCaptureX11::~ScreenCaptureX11() {
   if (capture_thread_->joinable()) {
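A side note on the two lines kept at the top of this hunk: NV12_BUFFER_SIZE expands to the unparenthesized expression 1280 * 720 * 3 / 2, which is fine for the array definition and the SendData() call in this commit, but can change meaning inside a larger expression (for example, x % NV12_BUFFER_SIZE expands to x % 1280 * 720 * 3 / 2). A minimal alternative, a sketch and not part of the commit, would be:

// Sketch only: a named constant instead of an unparenthesized macro.
// 1280 * 720 * 3 / 2 = 1,382,400 bytes for one 1280x720 NV12 frame
// (full-resolution Y plane plus interleaved UV at quarter resolution).
constexpr int kNv12BufferSize = 1280 * 720 * 3 / 2;
unsigned char nv12_buffer_[kNv12BufferSize];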
@@ -79,7 +72,7 @@ int ScreenCaptureX11::Init(const RECORD_DESKTOP_RECT &rect, const int fps,
   }
 
   pFrame_ = av_frame_alloc();
-  pFrameNV12_ = av_frame_alloc();
+  pFrameNv12_ = av_frame_alloc();
 
   const int pixel_w = 1280, pixel_h = 720;
   int screen_w = 1280, screen_h = 720;
@@ -100,31 +93,24 @@ int ScreenCaptureX11::Init(const RECORD_DESKTOP_RECT &rect, const int fps,
 int ScreenCaptureX11::Start() {
   capture_thread_.reset(new std::thread([this]() {
     while (1) {
-      printf("00000\n");
       if (av_read_frame(pFormatCtx_, packet_) >= 0) {
-        printf("111111444444\n");
         if (packet_->stream_index == videoindex_) {
-          printf("11111155555\n");
           avcodec_send_packet(pCodecCtx_, packet_);
           got_picture_ = avcodec_receive_frame(pCodecCtx_, pFrame_);
-          printf("33333333\n");
           if (!got_picture_) {
-            printf("44444444444\n");
-
-            av_image_fill_arrays(pFrameNV12_->data, pFrameNV12_->linesize,
+            av_image_fill_arrays(pFrameNv12_->data, pFrameNv12_->linesize,
                                  nv12_buffer_, AV_PIX_FMT_NV12, pFrame_->width,
                                  pFrame_->height, 1);
 
             sws_scale(img_convert_ctx_, pFrame_->data, pFrame_->linesize, 0,
-                      pFrame_->height, pFrameNV12_->data,
-                      pFrameNV12_->linesize);
+                      pFrame_->height, pFrameNv12_->data,
+                      pFrameNv12_->linesize);
 
-            fwrite(nv12_buffer_, 1, pFrame_->width * pFrame_->height * 3 / 2,
-                   file);
-
             _on_data((unsigned char *)nv12_buffer_,
                      pFrame_->width * pFrame_->height * 3 / 2, pFrame_->width,
                      pFrame_->height);
+            // av_packet_unref(packet_);
           }
         }
       }
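One reading aid for the hunk above: avcodec_receive_frame() returns 0 when a decoded frame is available, so got_picture_ is 0 on success and if (!got_picture_) means "a frame arrived". The commented-out av_packet_unref() also matters, because every packet obtained from av_read_frame() must eventually be unreferenced or the loop leaks packet buffers. A conventional FFmpeg 5.x pattern, a sketch rather than the commit's code, looks like:

// Sketch only: send one packet, drain every frame it produces, then release it.
if (av_read_frame(pFormatCtx_, packet_) >= 0) {
  if (packet_->stream_index == videoindex_ &&
      avcodec_send_packet(pCodecCtx_, packet_) == 0) {
    while (avcodec_receive_frame(pCodecCtx_, pFrame_) == 0) {
      // convert pFrame_ to NV12 and hand it to _on_data() here
    }
  }
  av_packet_unref(packet_);  // release the packet buffer for reuse
}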
@@ -74,7 +74,7 @@ class ScreenCaptureX11 {
   AVDictionary *options_ = nullptr;
   AVInputFormat *ifmt_ = nullptr;
   AVFrame *pFrame_ = nullptr;
-  AVFrame *pFrameNV12_ = nullptr;
+  AVFrame *pFrameNv12_ = nullptr;
   AVPacket *packet_ = nullptr;
   struct SwsContext *img_convert_ctx_ = nullptr;
 
@@ -1,6 +1,6 @@
 set_project("remote_desk")
 set_version("0.0.1")
-set_license("GPL-3.0")
+set_license("LGPL-3.0")
 
 add_rules("mode.release", "mode.debug")
 set_languages("c++17")
@@ -15,6 +15,7 @@ if is_os("windows") then
     add_links("Shell32", "windowsapp", "dwmapi", "User32", "kernel32")
     add_requires("vcpkg::ffmpeg 5.1.2", {configs = {shared = false}})
 elseif is_os("linux") then
+    add_requireconfs("ffmpeg.x264", {configs = {pic = true}})
     add_requires("ffmpeg 5.1.2", {system = false})
     add_syslinks("pthread", "dl")
 elseif is_os("macosx") then
@@ -63,7 +64,7 @@ target("remote_desk")
     elseif is_os("linux") then
         add_links("SDL2")
         add_ldflags("-lavformat", "-lavdevice", "-lavfilter", "-lavcodec",
-                    "-lswscale", "-lavutil", "-lswresample", "-lpostproc",
+                    "-lswscale", "-lavutil", "-lswresample",
                     "-lasound", "-lxcb-shape", "-lxcb-xfixes", "-lsndio", "-lxcb",
                     "-lxcb-shm", "-lXext", "-lX11", "-lXv", "-ldl", "-lpthread", {force = true})
     end
@@ -74,6 +75,6 @@ target("linux_capture")
     add_files("remote_desk_gui/linux_capture.cpp")
     add_links("SDL2")
     add_ldflags("-lavformat", "-lavdevice", "-lavfilter", "-lavcodec",
-                "-lswscale", "-lavutil", "-lswresample", "-lpostproc",
+                "-lswscale", "-lavutil", "-lswresample",
                 "-lasound", "-lxcb-shape", "-lxcb-xfixes", "-lsndio", "-lxcb",
                 "-lxcb-shm", "-lXext", "-lX11", "-lXv", "-lpthread", "-lx264", "-ldl", "-lSDL2" ,{force = true})
src/media/video/decode/openh264/openh264_decoder.cpp (new file, 142 lines)
@@ -0,0 +1,142 @@
#include "openh264_decoder.h"

#include <cstring>

#include "log.h"

#ifdef __cplusplus
extern "C" {
#endif
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavdevice/avdevice.h>
#include <libavfilter/avfilter.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
};
#ifdef __cplusplus
};
#endif

#define SAVE_DECODER_STREAM 0
static const int YUV420P_BUFFER_SIZE = 1280 * 720 * 3 / 2;

int YUV420ToNV12PFFmpeg(unsigned char *src_buffer, int width, int height,
                        unsigned char *dst_buffer) {
  AVFrame *Input_pFrame = av_frame_alloc();
  AVFrame *Output_pFrame = av_frame_alloc();
  struct SwsContext *img_convert_ctx = sws_getContext(
      width, height, AV_PIX_FMT_YUV420P, 1280, 720, AV_PIX_FMT_NV12,
      SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);

  av_image_fill_arrays(Input_pFrame->data, Input_pFrame->linesize, src_buffer,
                       AV_PIX_FMT_YUV420P, width, height, 1);
  av_image_fill_arrays(Output_pFrame->data, Output_pFrame->linesize, dst_buffer,
                       AV_PIX_FMT_NV12, 1280, 720, 1);

  sws_scale(img_convert_ctx, (uint8_t const **)Input_pFrame->data,
            Input_pFrame->linesize, 0, height, Output_pFrame->data,
            Output_pFrame->linesize);

  if (Input_pFrame) av_free(Input_pFrame);
  if (Output_pFrame) av_free(Output_pFrame);
  if (img_convert_ctx) sws_freeContext(img_convert_ctx);

  return 0;
}

OpenH264Decoder::OpenH264Decoder() {}
OpenH264Decoder::~OpenH264Decoder() {
  if (nv12_frame_) {
    delete nv12_frame_;
  }

  if (pData[0]) {
    delete pData[0];
  }

  if (pData[1]) {
    delete pData[1];
  }

  if (pData[2]) {
    delete pData[2];
  }
}

int OpenH264Decoder::Init() {
  SEncParamExt sParam;
  sParam.iPicWidth = 1280;
  sParam.iPicHeight = 720;
  sParam.iTargetBitrate = 1000;
  sParam.iTemporalLayerNum = 1;
  sParam.fMaxFrameRate = 30;
  sParam.iSpatialLayerNum = 1;

  decoded_frame_size_ = YUV420P_BUFFER_SIZE;
  decoded_frame_ = new uint8_t[YUV420P_BUFFER_SIZE];
  nv12_frame_ = new uint8_t[YUV420P_BUFFER_SIZE];
  pData[0] = new uint8_t[1280 * 720];
  pData[1] = new uint8_t[1280 * 720];
  pData[2] = new uint8_t[1280 * 720];

  if (WelsCreateDecoder(&openh264_decoder_) != 0) {
    LOG_ERROR("Failed to create OpenH264 decoder");
    return -1;
  }

  SDecodingParam sDecParam;

  memset(&sDecParam, 0, sizeof(SDecodingParam));
  sDecParam.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_DEFAULT;
  sDecParam.bParseOnly = false;

  int32_t iRet = openh264_decoder_->Initialize(&sDecParam);

  LOG_ERROR("inited decoded_frame_size_ {}", decoded_frame_size_);
  LOG_ERROR("inited");
  printf("1 this is %p\n", this);
  return 0;
}

int OpenH264Decoder::Decode(
    const uint8_t *data, int size,
    std::function<void(VideoFrame)> on_receive_decoded_frame) {
  if (!openh264_decoder_) {
    return -1;
  }

  SBufferInfo sDstBufInfo;
  memset(&sDstBufInfo, 0, sizeof(SBufferInfo));

  int32_t iRet =
      openh264_decoder_->DecodeFrameNoDelay(data, size, pData, &sDstBufInfo);

  if (iRet != 0) {
    return -1;
  }

  if (sDstBufInfo.iBufferStatus == 1) {
    if (on_receive_decoded_frame) {
      memcpy(decoded_frame_, pData[0], frame_width_ * frame_height_);
      memcpy(decoded_frame_ + frame_width_ * frame_height_, pData[1],
             frame_width_ * frame_height_ / 2);
      memcpy(decoded_frame_ + frame_width_ * frame_height_ * 3 / 2, pData[2],
             frame_width_ * frame_height_ / 2);
      YUV420ToNV12PFFmpeg(decoded_frame_, frame_width_, frame_height_,
                          nv12_frame_);

      VideoFrame decoded_frame(nv12_frame_,
                               frame_width_ * frame_height_ * 3 / 2,
                               frame_width_, frame_height_);
      on_receive_decoded_frame(decoded_frame);
      if (SAVE_DECODER_STREAM) {
        fwrite((unsigned char *)decoded_frame.Buffer(), 1, decoded_frame.Size(),
               file_);
      }
    }
  }

  return 0;
}
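One review note on Decode(): DecodeFrameNoDelay() repoints pData at the decoder's internal planes and reports their real dimensions and strides in SBufferInfo, and for I420 each chroma plane is (width/2) x (height/2), that is width*height/4 bytes when tightly packed. The fixed-size memcpy calls above (width*height/2 per chroma plane, with the V destination at offset width*height*3/2) therefore appear to run past decoded_frame_, which holds only width*height*3/2 bytes. A stride-aware repack, a sketch that assumes the stock SBufferInfo/SSysMEMBuffer field names from the installed OpenH264 headers, could look like:

// Sketch only, not part of the commit: pack the decoder's strided I420 planes
// into a tight buffer of exactly w*h*3/2 bytes.
static void PackI420(uint8_t *dst, unsigned char **plane, const SBufferInfo &bi) {
  const int w = bi.UsrData.sSystemBuffer.iWidth;
  const int h = bi.UsrData.sSystemBuffer.iHeight;
  const int sy = bi.UsrData.sSystemBuffer.iStride[0];  // luma stride
  const int sc = bi.UsrData.sSystemBuffer.iStride[1];  // chroma stride
  for (int r = 0; r < h; ++r)                          // Y: w bytes per row
    memcpy(dst + r * w, plane[0] + r * sy, w);
  uint8_t *u = dst + w * h;
  uint8_t *v = u + (w / 2) * (h / 2);                  // V starts at w*h*5/4
  for (int r = 0; r < h / 2; ++r) {                    // U and V: w/2 bytes per row
    memcpy(u + r * (w / 2), plane[1] + r * sc, w / 2);
    memcpy(v + r * (w / 2), plane[2] + r * sc, w / 2);
  }
}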
src/media/video/decode/openh264/openh264_decoder.h (new file, 42 lines)
@@ -0,0 +1,42 @@
/*
 * @Author: DI JUNKUN
 * @Date: 2023-11-03
 * Copyright (c) 2023 by DI JUNKUN, All Rights Reserved.
 */

#ifndef _OPENH264_DECODER_H_
#define _OPENH264_DECODER_H_

#include <wels/codec_api.h>
#include <wels/codec_app_def.h>
#include <wels/codec_def.h>
#include <wels/codec_ver.h>

#include <functional>

#include "video_decoder.h"

class OpenH264Decoder : public VideoDecoder {
 public:
  OpenH264Decoder();
  virtual ~OpenH264Decoder();

 public:
  int Init();
  int Decode(const uint8_t* data, int size,
             std::function<void(VideoFrame)> on_receive_decoded_frame);

 private:
  ISVCDecoder* openh264_decoder_ = nullptr;
  bool get_first_keyframe_ = false;
  bool skip_frame_ = false;
  FILE* file_ = nullptr;
  uint8_t* decoded_frame_ = nullptr;
  int decoded_frame_size_ = 0;
  uint8_t* nv12_frame_ = nullptr;
  unsigned char* pData[3] = {};
  int frame_width_ = 1280;
  int frame_height_ = 720;
};

#endif
@@ -5,6 +5,7 @@
 #else
 #include "ffmpeg/ffmpeg_video_decoder.h"
 #include "nvcodec/nvidia_video_decoder.h"
+#include "openh264/openh264_decoder.h"
 #endif
 
 #include "log.h"
@@ -25,7 +26,8 @@ std::unique_ptr<VideoDecoder> VideoDecoderFactory::CreateVideoDecoder(
       return nullptr;
     }
   } else {
-    return std::make_unique<FfmpegVideoDecoder>(FfmpegVideoDecoder());
+    // return std::make_unique<FfmpegVideoDecoder>(FfmpegVideoDecoder());
+    return std::make_unique<OpenH264Decoder>(OpenH264Decoder());
   }
 #endif
 }
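For orientation, a minimal and purely hypothetical call sequence for the decoder that the factory now returns, based only on the OpenH264Decoder interface added above; the H.264 input buffer and the RenderNv12() sink are illustrative names, not part of the project:

// Sketch only: drive the new decoder with one H.264 access unit.
auto decoder = std::make_unique<OpenH264Decoder>();
if (decoder->Init() == 0) {
  decoder->Decode(h264_data, h264_size, [](VideoFrame frame) {
    // frame wraps a 1280x720 NV12 buffer of frame.Size() bytes in this commit
    RenderNv12(frame);
  });
}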
@@ -4,14 +4,48 @@
 
 #include "log.h"
 
-#define SAVE_ENCODER_STREAM 0
+#define SAVE_NV12_STREAM 0
+#define SAVE_H264_STREAM 1
+
+#define YUV420P_BUFFER_SIZE 1280 * 720 * 3 / 2
+unsigned char yuv420p_buffer[YUV420P_BUFFER_SIZE];
+
+int NV12ToYUV420PFFmpeg(unsigned char *src_buffer, int width, int height,
+                        unsigned char *dst_buffer) {
+  AVFrame *Input_pFrame = av_frame_alloc();
+  AVFrame *Output_pFrame = av_frame_alloc();
+  struct SwsContext *img_convert_ctx = sws_getContext(
+      width, height, AV_PIX_FMT_NV12, 1280, 720, AV_PIX_FMT_YUV420P,
+      SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
+
+  av_image_fill_arrays(Input_pFrame->data, Input_pFrame->linesize, src_buffer,
+                       AV_PIX_FMT_NV12, width, height, 1);
+  av_image_fill_arrays(Output_pFrame->data, Output_pFrame->linesize, dst_buffer,
+                       AV_PIX_FMT_YUV420P, 1280, 720, 1);
+
+  sws_scale(img_convert_ctx, (uint8_t const **)Input_pFrame->data,
+            Input_pFrame->linesize, 0, height, Output_pFrame->data,
+            Output_pFrame->linesize);
+
+  if (Input_pFrame) av_free(Input_pFrame);
+  if (Output_pFrame) av_free(Output_pFrame);
+  if (img_convert_ctx) sws_freeContext(img_convert_ctx);
+
+  return 0;
+}
 
 FFmpegVideoEncoder::FFmpegVideoEncoder() {}
 FFmpegVideoEncoder::~FFmpegVideoEncoder() {
-  if (SAVE_ENCODER_STREAM && file_) {
-    fflush(file_);
-    fclose(file_);
-    file_ = nullptr;
+  if (SAVE_NV12_STREAM && file_nv12_) {
+    fflush(file_nv12_);
+    fclose(file_nv12_);
+    file_nv12_ = nullptr;
+  }
+
+  if (SAVE_H264_STREAM && file_h264_) {
+    fflush(file_h264_);
+    fclose(file_h264_);
+    file_h264_ = nullptr;
   }
 
   if (nv12_data_) {
@@ -25,14 +59,19 @@ FFmpegVideoEncoder::~FFmpegVideoEncoder() {
 }
 
 int FFmpegVideoEncoder::Init() {
-  av_log_set_level(AV_LOG_ERROR);
+  av_log_set_level(AV_LOG_VERBOSE);
 
   codec_ = avcodec_find_encoder(AV_CODEC_ID_H264);
 
   if (!codec_) {
     LOG_ERROR("Failed to find H.264 encoder");
     return -1;
+  } else {
+    LOG_INFO("Use H264 encoder [{}]", codec_->name);
+    if (0 == strcmp(codec_->name, "libx264")) {
+      use_libx264_ = true;
+    }
   }
+  use_libx264_ = true;
 
   codec_ctx_ = avcodec_alloc_context3(codec_);
   if (!codec_ctx_) {
@@ -46,7 +85,11 @@ int FFmpegVideoEncoder::Init() {
   codec_ctx_->height = frame_height;
   codec_ctx_->time_base.num = 1;
   codec_ctx_->time_base.den = fps_;
-  codec_ctx_->pix_fmt = AV_PIX_FMT_NV12;
+  if (use_libx264_) {
+    codec_ctx_->pix_fmt = AV_PIX_FMT_YUV420P;
+  } else {
+    codec_ctx_->pix_fmt = AV_PIX_FMT_YUV420P;
+  }
   codec_ctx_->gop_size = keyFrameInterval_;
   codec_ctx_->keyint_min = keyFrameInterval_;
   codec_ctx_->max_b_frames = 0;
@@ -77,10 +120,17 @@ int FFmpegVideoEncoder::Init() {
 
   packet_ = av_packet_alloc();
 
-  if (SAVE_ENCODER_STREAM) {
-    file_ = fopen("encode_stream.h264", "w+b");
-    if (!file_) {
-      LOG_WARN("Fail to open stream.h264");
+  if (SAVE_H264_STREAM) {
+    file_h264_ = fopen("encoded_stream.h264", "w+b");
+    if (!file_h264_) {
+      LOG_WARN("Fail to open encoded_stream.h264");
+    }
+  }
+
+  if (SAVE_NV12_STREAM) {
+    file_nv12_ = fopen("raw_stream.yuv", "w+b");
+    if (!file_nv12_) {
+      LOG_WARN("Fail to open raw_stream.yuv");
     }
   }
 
@@ -95,10 +145,34 @@ int FFmpegVideoEncoder::Encode(
     return -1;
   }
 
+  if (use_libx264_) {
+    NV12ToYUV420PFFmpeg((unsigned char *)pData, frame_->width, frame_->height,
+                        (unsigned char *)yuv420p_buffer);
+    memcpy(frame_->data[0], yuv420p_buffer, frame_->width * frame_->height);
+    memcpy(frame_->data[1], yuv420p_buffer + frame_->width * frame_->height,
+           frame_->width * frame_->height / 2);
+    memcpy(frame_->data[2],
+           yuv420p_buffer + frame_->width * frame_->height * 3 / 2,
+           frame_->width * frame_->height / 2);
+
+    // frame_->data[0] = yuv420p_buffer;
+    // frame_->data[1] = yuv420p_buffer + frame_->width * frame_->height;
+    // frame_->data[2] = yuv420p_buffer + frame_->width * frame_->height * 3 /
+    // 2;
+
+    if (SAVE_NV12_STREAM) {
+      fwrite(yuv420p_buffer, 1, nSize, file_nv12_);
+    }
+  } else {
   memcpy(frame_->data[0], pData, frame_->width * frame_->height);
   memcpy(frame_->data[1], pData + frame_->width * frame_->height,
          frame_->width * frame_->height / 2);
+
+    if (SAVE_NV12_STREAM) {
+      fwrite(pData, 1, nSize, file_nv12_);
+    }
+  }
 
   frame_->pts = pts_++;
 
   int ret = avcodec_send_frame(codec_ctx_, frame_);
@@ -113,17 +187,17 @@ int FFmpegVideoEncoder::Encode(
   }
 
   // Remove first 6 bytes in I frame, SEI ?
-  if (0x00 == packet_->data[0] && 0x00 == packet_->data[1] &&
-      0x00 == packet_->data[2] && 0x01 == packet_->data[3] &&
-      0x09 == packet_->data[4] && 0x10 == packet_->data[5]) {
-    packet_->data += 6;
-    packet_->size -= 6;
-  }
+  // if (0x00 == packet_->data[0] && 0x00 == packet_->data[1] &&
+  //     0x00 == packet_->data[2] && 0x01 == packet_->data[3] &&
+  //     0x09 == packet_->data[4] && 0x10 == packet_->data[5]) {
+  //   packet_->data += 6;
+  //   packet_->size -= 6;
+  // }
 
   if (on_encoded_image) {
     on_encoded_image((char *)packet_->data, packet_->size);
-    if (SAVE_ENCODER_STREAM) {
-      fwrite(packet_->data, 1, packet_->size, file_);
+    if (SAVE_H264_STREAM) {
+      fwrite(packet_->data, 1, packet_->size, file_h264_);
     }
   } else {
     OnEncodedImage((char *)packet_->data, packet_->size);
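A small performance note on the NV12ToYUV420PFFmpeg() helper introduced at the top of this file and repeated in openh264_encoder.cpp below: it allocates two AVFrames and a fresh SwsContext on every call, and because both translation units define the helper and the yuv420p_buffer global with external linkage, building both files together risks a duplicate-symbol link error, so making them static or moving them to a shared utility is worth considering. A cached-context variant, a sketch and not part of the commit, keeps the scaler alive across frames:

// Sketch only: sws_getCachedContext() reuses the previous context as long as
// the conversion parameters stay unchanged, avoiding per-frame setup cost.
static SwsContext *nv12_to_i420_ctx = nullptr;
nv12_to_i420_ctx = sws_getCachedContext(nv12_to_i420_ctx,
                                        width, height, AV_PIX_FMT_NV12,
                                        1280, 720, AV_PIX_FMT_YUV420P,
                                        SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);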
@@ -14,10 +14,12 @@ extern "C" {
 #endif
 extern "C" {
 #include <libavcodec/avcodec.h>
+#include <libavdevice/avdevice.h>
+#include <libavfilter/avfilter.h>
 #include <libavformat/avformat.h>
 #include <libavutil/imgutils.h>
-#include <libavutil/opt.h>
-}
+#include <libswscale/swscale.h>
+};
 #ifdef __cplusplus
 };
 #endif
@@ -51,9 +53,11 @@ class FFmpegVideoEncoder : public VideoEncoder {
 
   std::vector<std::vector<uint8_t>> encoded_packets_;
   unsigned char* encoded_image_ = nullptr;
-  FILE* file_ = nullptr;
+  FILE* file_h264_ = nullptr;
+  FILE* file_nv12_ = nullptr;
   unsigned char* nv12_data_ = nullptr;
   unsigned int seq_ = 0;
+  bool use_libx264_ = false;
 
   const AVCodec* codec_ = nullptr;
   AVCodecContext* codec_ctx_ = nullptr;
src/media/video/encode/openh264/openh264_encoder.cpp (new file, 271 lines)
@@ -0,0 +1,271 @@
#include "openh264_encoder.h"

#include <chrono>

#include "log.h"

#ifdef __cplusplus
extern "C" {
#endif
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavdevice/avdevice.h>
#include <libavfilter/avfilter.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
};
#ifdef __cplusplus
};
#endif

#define YUV420P_BUFFER_SIZE 1280 * 720 * 3 / 2
unsigned char yuv420p_buffer[YUV420P_BUFFER_SIZE];

int NV12ToYUV420PFFmpeg(unsigned char *src_buffer, int width, int height,
                        unsigned char *dst_buffer) {
  AVFrame *Input_pFrame = av_frame_alloc();
  AVFrame *Output_pFrame = av_frame_alloc();
  struct SwsContext *img_convert_ctx = sws_getContext(
      width, height, AV_PIX_FMT_NV12, 1280, 720, AV_PIX_FMT_YUV420P,
      SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);

  av_image_fill_arrays(Input_pFrame->data, Input_pFrame->linesize, src_buffer,
                       AV_PIX_FMT_NV12, width, height, 1);
  av_image_fill_arrays(Output_pFrame->data, Output_pFrame->linesize, dst_buffer,
                       AV_PIX_FMT_YUV420P, 1280, 720, 1);

  sws_scale(img_convert_ctx, (uint8_t const **)Input_pFrame->data,
            Input_pFrame->linesize, 0, height, Output_pFrame->data,
            Output_pFrame->linesize);

  if (Input_pFrame) av_free(Input_pFrame);
  if (Output_pFrame) av_free(Output_pFrame);
  if (img_convert_ctx) sws_freeContext(img_convert_ctx);

  return 0;
}

OpenH264Encoder::OpenH264Encoder() { delete encoded_frame_; }
OpenH264Encoder::~OpenH264Encoder() { Release(); }

SEncParamExt OpenH264Encoder::CreateEncoderParams() const {
  SEncParamExt encoder_params;
  openh264_encoder_->GetDefaultParams(&encoder_params);
  // if (codec_.mode == VideoCodecMode::kRealtimeVideo) { //
  encoder_params.iUsageType = CAMERA_VIDEO_REAL_TIME;
  // } else if (codec_.mode == VideoCodecMode::kScreensharing) {
  //   encoder_params.iUsageType = SCREEN_CONTENT_REAL_TIME;
  // }

  encoder_params.iPicWidth = frame_width_;
  encoder_params.iPicHeight = frame_height_;
  encoder_params.iTargetBitrate = target_bitrate_;
  encoder_params.iMaxBitrate = max_bitrate_;
  encoder_params.iRCMode = RC_BITRATE_MODE;
  encoder_params.fMaxFrameRate = max_frame_rate_;
  encoder_params.bEnableFrameSkip = false;
  encoder_params.uiIntraPeriod = key_frame_interval_;
  encoder_params.uiMaxNalSize = 0;
  // Threading model: use auto.
  //  0: auto (dynamic imp. internal encoder)
  //  1: single thread (default value)
  // >1: number of threads
  encoder_params.iMultipleThreadIdc = 1;
  // The base spatial layer 0 is the only one we use.
  encoder_params.sSpatialLayers[0].iVideoWidth = encoder_params.iPicWidth;
  encoder_params.sSpatialLayers[0].iVideoHeight = encoder_params.iPicHeight;
  encoder_params.sSpatialLayers[0].fFrameRate = encoder_params.fMaxFrameRate;
  encoder_params.sSpatialLayers[0].iSpatialBitrate =
      encoder_params.iTargetBitrate;
  encoder_params.sSpatialLayers[0].iMaxSpatialBitrate =
      encoder_params.iMaxBitrate;
  encoder_params.iTemporalLayerNum = 1;
  if (encoder_params.iTemporalLayerNum > 1) {
    encoder_params.iNumRefFrame = 1;
  }
  LOG_INFO("OpenH264 version is [{}.{}]", OPENH264_MAJOR, OPENH264_MINOR);

  // SingleNalUnit
  encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceNum = 1;
  encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceMode =
      SM_SIZELIMITED_SLICE;
  encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceSizeConstraint =
      static_cast<unsigned int>(max_payload_size_);
  LOG_INFO("Encoder is configured with NALU constraint: {} bytes",
           max_payload_size_);

  return encoder_params;
}

int OpenH264Encoder::Init() {
  // Create encoder.
  if (WelsCreateSVCEncoder(&openh264_encoder_) != 0) {
    LOG_ERROR("Failed to create OpenH264 encoder");
    return -1;
  }

  encoded_frame_ = new uint8_t[YUV420P_BUFFER_SIZE];

  int trace_level = WELS_LOG_WARNING;
  openh264_encoder_->SetOption(ENCODER_OPTION_TRACE_LEVEL, &trace_level);

  // Create encoder parameters based on the layer configuration.
  SEncParamExt encoder_params = CreateEncoderParams();

  if (openh264_encoder_->InitializeExt(&encoder_params) != 0) {
    LOG_ERROR("Failed to initialize OpenH264 encoder");
    // Release();
    return -1;
  }

  int video_format = EVideoFormatType::videoFormatI420;
  openh264_encoder_->SetOption(ENCODER_OPTION_DATAFORMAT, &video_format);

  return 0;
}
int OpenH264Encoder::Encode(
    const uint8_t *pData, int nSize,
    std::function<int(char *encoded_packets, size_t size)> on_encoded_image) {
  if (!openh264_encoder_) {
    LOG_ERROR("Invalid openh264 encoder");
    return -1;
  }

  if (0 == seq_++ % 300) {
    ForceIdr();
  }

  NV12ToYUV420PFFmpeg((unsigned char *)pData, frame_width_, frame_height_,
                      (unsigned char *)yuv420p_buffer);
  // memcpy(frame_->data[0], yuv420p_buffer, frame_width_ * frame_height_);
  // memcpy(frame_->data[1], yuv420p_buffer + frame_width_ * frame_height_,
  //        frame_width_ * frame_height_ / 2);
  // memcpy(frame_->data[2], yuv420p_buffer + frame_width_ * frame_height_ * 3 /
  //        2,
  //        frame_width_ * frame_height_ / 2);

  raw_frame_ = {0};
  raw_frame_.iPicWidth = frame_width_;
  raw_frame_.iPicHeight = frame_height_;
  raw_frame_.iColorFormat = EVideoFormatType::videoFormatI420;
  raw_frame_.uiTimeStamp =
      std::chrono::high_resolution_clock::now().time_since_epoch().count();

  raw_frame_.iStride[0] = frame_width_;
  raw_frame_.iStride[1] = frame_width_ >> 1;
  raw_frame_.iStride[2] = frame_width_ >> 1;
  raw_frame_.pData[0] = (unsigned char *)yuv420p_buffer;
  raw_frame_.pData[1] = raw_frame_.pData[0] + frame_width_ * frame_height_;
  raw_frame_.pData[2] =
      raw_frame_.pData[1] + (frame_width_ * frame_height_ >> 2);

  SFrameBSInfo info;
  memset(&info, 0, sizeof(SFrameBSInfo));

  int enc_ret = openh264_encoder_->EncodeFrame(&raw_frame_, &info);
  if (enc_ret != 0) {
    LOG_ERROR("OpenH264 frame encoding failed, EncodeFrame returned {}",
              enc_ret);

    return -1;
  }

#if 0
  int encoded_frame_size = 0;

  for (int layer = 0; layer < info.iLayerNum; ++layer) {
    const SLayerBSInfo &layerInfo = info.sLayerInfo[layer];
    size_t layer_len = 0;
    memcpy(encoded_frame_ + encoded_frame_size, layerInfo.pBsBuf, layer_len);
    encoded_frame_size += layer_len;
  }

  encoded_frame_size_ = encoded_frame_size;

  if (on_encoded_image) {
    on_encoded_image((char *)encoded_frame_, encoded_frame_size_);
  } else {
    OnEncodedImage((char *)encoded_frame_, encoded_frame_size_);
  }
#else
  if (info.eFrameType == videoFrameTypeInvalid) {
    LOG_ERROR("videoFrameTypeInvalid");
    return -1;
  }

  int temporal_id = 0;

  int encoded_frame_size = 0;

  if (info.eFrameType != videoFrameTypeSkip) {
    int layer = 0;
    while (layer < info.iLayerNum) {
      SLayerBSInfo *pLayerBsInfo = &(info.sLayerInfo[layer]);
      if (pLayerBsInfo != NULL) {
        int layer_size = 0;
        temporal_id = pLayerBsInfo->uiTemporalId;
        int nal_index = pLayerBsInfo->iNalCount - 1;
        do {
          layer_size += pLayerBsInfo->pNalLengthInByte[nal_index];
          --nal_index;
        } while (nal_index >= 0);
        memcpy(encoded_frame_ + encoded_frame_size, pLayerBsInfo->pBsBuf,
               layer_size);
        encoded_frame_size += layer_size;
      }
      ++layer;
    }

    got_output = true;

  } else {
    is_keyframe = false;
  }

  if (encoded_frame_size > 0) {
    encoded_frame_size_ = encoded_frame_size;

    if (on_encoded_image) {
      on_encoded_image((char *)encoded_frame_, encoded_frame_size_);
    } else {
      OnEncodedImage((char *)encoded_frame_, encoded_frame_size_);
    }

    EVideoFrameType ft_temp = info.eFrameType;
    if (ft_temp == 1 || ft_temp == 2) {
      is_keyframe = true;
    } else if (ft_temp == 3) {
      is_keyframe = false;
      if (temporal_) {
        if (temporal_id == 0 || temporal_id == 1) {
          is_keyframe = true;
        }
      }
    } else {
      is_keyframe = false;
    }
  }
#endif

  return 0;
}

int OpenH264Encoder::OnEncodedImage(char *encoded_packets, size_t size) {
  LOG_INFO("OnEncodedImage not implemented");
  return 0;
}

void OpenH264Encoder::ForceIdr() {
  if (openh264_encoder_) {
    openh264_encoder_->ForceIntraFrame(true);
  }
}

int OpenH264Encoder::Release() {
  if (openh264_encoder_) {
    WelsDestroySVCEncoder(openh264_encoder_);
  }

  return 0;
}
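A readability note on the keyframe classification near the end of Encode(): the literals 1, 2 and 3 compare eFrameType against the EVideoFrameType enum; assuming the usual ordering in wels/codec_def.h (videoFrameTypeIDR = 1, videoFrameTypeI = 2, videoFrameTypeP = 3), the same logic reads more clearly by name:

// Sketch only, not part of the commit: the same keyframe test written with
// the enum constants instead of magic numbers.
if (info.eFrameType == videoFrameTypeIDR || info.eFrameType == videoFrameTypeI) {
  is_keyframe = true;
} else if (info.eFrameType == videoFrameTypeP) {
  is_keyframe = temporal_ && (temporal_id == 0 || temporal_id == 1);
} else {
  is_keyframe = false;
}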
src/media/video/encode/openh264/openh264_encoder.h (new file, 62 lines)
@@ -0,0 +1,62 @@
/*
 * @Author: DI JUNKUN
 * @Date: 2023-11-03
 * Copyright (c) 2023 by DI JUNKUN, All Rights Reserved.
 */

#ifndef _OPENH264_ENCODER_H_
#define _OPENH264_ENCODER_H_

#include <wels/codec_api.h>
#include <wels/codec_app_def.h>
#include <wels/codec_def.h>
#include <wels/codec_ver.h>

#include <functional>
#include <vector>

#include "video_encoder.h"

class OpenH264Encoder : public VideoEncoder {
 public:
  OpenH264Encoder();
  virtual ~OpenH264Encoder();

  int Init();
  int Encode(
      const uint8_t* pData, int nSize,
      std::function<int(char* encoded_packets, size_t size)> on_encoded_image);

  virtual int OnEncodedImage(char* encoded_packets, size_t size);

  void ForceIdr();

 private:
  SEncParamExt CreateEncoderParams() const;
  int Release();

 private:
  int frame_width_ = 1280;
  int frame_height_ = 720;
  int key_frame_interval_ = 3000;
  int target_bitrate_ = 1000;
  int max_bitrate_ = 1000;
  int max_payload_size_ = 3000;
  int max_frame_rate_ = 30;
  std::vector<std::vector<uint8_t>> encoded_packets_;
  unsigned char* encoded_image_ = nullptr;
  FILE* file_ = nullptr;
  unsigned char* nv12_data_ = nullptr;
  unsigned int seq_ = 0;

  // openh264
  ISVCEncoder* openh264_encoder_ = nullptr;
  SSourcePicture raw_frame_;
  uint8_t* encoded_frame_ = nullptr;
  int encoded_frame_size_ = 0;
  bool got_output = false;
  bool is_keyframe = false;
  int temporal_ = 1;
};

#endif
@@ -5,6 +5,7 @@
 #else
 #include "ffmpeg/ffmpeg_video_encoder.h"
 #include "nvcodec/nvidia_video_encoder.h"
+#include "openh264/openh264_encoder.h"
 #endif
 
 #include "log.h"
@@ -25,7 +26,8 @@ std::unique_ptr<VideoEncoder> VideoEncoderFactory::CreateVideoEncoder(
       return nullptr;
     }
   } else {
-    return std::make_unique<FFmpegVideoEncoder>(FFmpegVideoEncoder());
+    // return std::make_unique<FFmpegVideoEncoder>(FFmpegVideoEncoder());
+    return std::make_unique<OpenH264Encoder>(OpenH264Encoder());
   }
 #endif
 }
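As with the decoder factory, a minimal and hypothetical call sequence for the encoder now returned here, using only the OpenH264Encoder interface added in this commit; the capture source and the SendData() wiring (borrowed from the main() hunk at the top of the diff) are illustrative:

// Sketch only: encode one 1280x720 NV12 frame and forward the H.264 bitstream.
auto encoder = std::make_unique<OpenH264Encoder>();
if (encoder->Init() == 0) {
  encoder->Encode(nv12_frame, 1280 * 720 * 3 / 2,
                  [&](char *encoded, size_t size) {
                    SendData(peer_server, DATA_TYPE::VIDEO, encoded, size);
                    return 0;
                  });
}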
@@ -48,7 +48,7 @@ void RtpVideoReceiver::InsertRtpPacket(RtpPacket& rtp_packet) {
 
     rtcp_rr.Encode();
 
-    SendRtcpRR(rtcp_rr);
+    // SendRtcpRR(rtcp_rr);
   }
 
   if (RtpPacket::NAL_UNIT_TYPE::NALU == rtp_packet.NalUnitType()) {
@@ -85,7 +85,7 @@ int RtpVideoSender::SendRtpPacket(RtpPacket& rtp_packet) {
 
     rtcp_sr.Encode();
 
-    SendRtcpSR(rtcp_sr);
+    // SendRtcpSR(rtcp_sr);
  }
 
   return 0;
thirdparty/ffmpeg/xmake.lua (vendored)
@@ -28,7 +28,7 @@ package("ffmpeg")
    add_versions("git:5.0.1", "n5.0.1")
    add_versions("git:4.0.2", "n4.0.2")
 
-    add_configs("gpl", {description = "Enable GPL code", default = true, type = "boolean"})
+    add_configs("gpl", {description = "Enable GPL code", default = false, type = "boolean"})
    add_configs("ffprobe", {description = "Enable ffprobe program.", default = false, type = "boolean"})
    add_configs("ffmpeg", {description = "Enable ffmpeg program.", default = true, type = "boolean"})
    add_configs("ffplay", {description = "Enable ffplay program.", default = false, type = "boolean"})
@@ -42,14 +42,14 @@ package("ffmpeg")
        add_configs("vdpau", {description = "Enable vdpau library.", default = false, type = "boolean"})
        add_configs("hardcoded-tables", {description = "Enable hardcoded tables.", default = true, type = "boolean"})
        add_configs("asm", {description = "Enable asm", default = false, type = "boolean"})
-        add_configs("libx264", {description = "Enable libx264", default = true, type = "boolean"})
+        add_configs("libopenh264", {description = "Enable libopenh264", default = true, type = "boolean"})
    end
 
    add_links("avfilter", "avdevice", "avformat", "avcodec", "swscale", "swresample", "avutil")
    if is_plat("macosx") then
        add_frameworks("CoreFoundation", "Foundation", "CoreVideo", "CoreMedia", "AudioToolbox", "VideoToolbox", "Security")
    elseif is_plat("linux") then
-        add_syslinks("pthread")
+        add_syslinks("pthread", "openh264")
    end
 
    if is_plat("linux", "macosx") then
xmake.lua
@@ -1,6 +1,6 @@
 set_project("projectx")
 set_version("0.0.1")
-set_license("GPL-3.0")
+set_license("LGPL-3.0")
 
 add_rules("mode.release", "mode.debug")
 set_languages("c++17")
@@ -34,6 +34,7 @@ elseif is_os("linux") then
    add_requires("ffmpeg 5.1.2")
    add_requires("glib", {system = true})
    add_requires("vcpkg::libnice 0.1.21")
+   add_requires("openh264")
    add_packages("glib", "vcpkg::libnice")
 elseif is_os("macosx") then
    add_requires("ffmpeg 5.1.2", {system = false})
@@ -143,19 +144,23 @@ target("media")
        add_linkdirs("thirdparty/nvcodec/Lib/x64")
        add_links("cuda", "nvencodeapi", "nvcuvid")
    elseif is_os(("linux")) then
-        add_packages("cuda", "ffmpeg")
+        add_packages("cuda", "ffmpeg", "openh264")
        add_files("src/media/video/encode/*.cpp",
                  "src/media/video/decode/*.cpp",
                  "src/media/video/encode/nvcodec/*.cpp",
                  "src/media/video/decode/nvcodec/*.cpp",
                  "src/media/video/encode/ffmpeg/*.cpp",
-                  "src/media/video/decode/ffmpeg/*.cpp")
+                  "src/media/video/decode/ffmpeg/*.cpp",
+                  "src/media/video/encode/openh264/*.cpp",
+                  "src/media/video/decode/openh264/*.cpp")
        add_includedirs("src/media/video/encode",
                        "src/media/video/decode",
                        "src/media/video/encode/nvcodec",
                        "src/media/video/decode/nvcodec",
                        "src/media/video/encode/ffmpeg",
                        "src/media/video/decode/ffmpeg",
+                       "src/media/video/encode/openh264",
+                       "src/media/video/decode/openh264",
                        "thirdparty/nvcodec/Interface",
                        "thirdparty/nvcodec/Samples", {public = true})
        add_linkdirs("thirdparty/nvcodec/Lib/x64")