#include "openh264_decoder.h"
|
|
|
|
#include <cstring>
|
|
|
|
#include "libyuv.h"
|
|
#include "log.h"
|
|
|
|
// #define SAVE_DECODED_NV12_STREAM
|
|
// #define SAVE_RECEIVED_H264_STREAM
|
|
|
|
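// Repack the decoder's output planes, which may carry per-plane padding, into
// a tightly packed I420 frame (Y, then U, then V) by copying row by row.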
void CopyYuvWithStride(uint8_t *src_y, uint8_t *src_u, uint8_t *src_v,
                       int width, int height, int stride_y, int stride_u,
                       int stride_v, uint8_t *yuv420p_frame) {
  int actual_stride_y = width;
  int actual_stride_u = width / 2;
  int actual_stride_v = width / 2;

  for (int row = 0; row < height; row++) {
    memcpy(yuv420p_frame, src_y, actual_stride_y);
    src_y += stride_y;
    yuv420p_frame += actual_stride_y;
  }

  for (int row = 0; row < height / 2; row++) {
    memcpy(yuv420p_frame, src_u, actual_stride_u);
    src_u += stride_u;
    yuv420p_frame += actual_stride_u;
  }

  for (int row = 0; row < height / 2; row++) {
    memcpy(yuv420p_frame, src_v, actual_stride_v);
    src_v += stride_v;
    yuv420p_frame += actual_stride_v;
  }
}

OpenH264Decoder::OpenH264Decoder(std::shared_ptr<SystemClock> clock)
    : clock_(clock) {}

OpenH264Decoder::~OpenH264Decoder() {
  if (openh264_decoder_) {
    openh264_decoder_->Uninitialize();
    WelsDestroyDecoder(openh264_decoder_);
  }

  if (nv12_frame_) {
    delete[] nv12_frame_;
  }

  if (yuv420p_frame_) {
    delete[] yuv420p_frame_;
  }

#ifdef SAVE_DECODED_NV12_STREAM
  if (nv12_stream_) {
    fclose(nv12_stream_);
    nv12_stream_ = nullptr;
  }
#endif

#ifdef SAVE_RECEIVED_H264_STREAM
  if (h264_stream_) {
    fclose(h264_stream_);
    h264_stream_ = nullptr;
  }
#endif
}

int OpenH264Decoder::Init() {
#ifdef SAVE_DECODED_NV12_STREAM
  nv12_stream_ = fopen("nv12_receive_.yuv", "w+b");
  if (!nv12_stream_) {
    LOG_WARN("Failed to open nv12_receive_.yuv");
  }
#endif

#ifdef SAVE_RECEIVED_H264_STREAM
  h264_stream_ = fopen("h264_receive.h264", "w+b");
  if (!h264_stream_) {
    LOG_WARN("Failed to open h264_receive.h264");
  }
#endif

  // Default resolution until the first decoded frame reports the real one.
  frame_width_ = 1280;
  frame_height_ = 720;

  if (WelsCreateDecoder(&openh264_decoder_) != 0) {
    LOG_ERROR("Failed to create OpenH264 decoder");
    return -1;
  }

  SDecodingParam sDecParam;
  memset(&sDecParam, 0, sizeof(SDecodingParam));
  sDecParam.uiTargetDqLayer = UCHAR_MAX;
  sDecParam.eEcActiveIdc = ERROR_CON_SLICE_COPY;
  sDecParam.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_DEFAULT;

  if (openh264_decoder_->Initialize(&sDecParam) != 0) {
    LOG_ERROR("Failed to initialize OpenH264 decoder");
    return -1;
  }

  int trace_level = WELS_LOG_QUIET;
  openh264_decoder_->SetOption(DECODER_OPTION_TRACE_LEVEL, &trace_level);

  return 0;
}

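// Decode one received H.264 access unit, repack the output into contiguous
// I420, convert it to NV12, and hand the result to the caller's callback.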
int OpenH264Decoder::Decode(
    std::unique_ptr<ReceivedFrame> received_frame,
    std::function<void(const DecodedFrame &)> on_receive_decoded_frame) {
  if (!openh264_decoder_) {
    return -1;
  }

  const uint8_t *data = received_frame->Buffer();
  size_t size = received_frame->Size();

  if (data == nullptr) {
    LOG_WARN("Received frame buffer is nullptr");
  }

#ifdef SAVE_RECEIVED_H264_STREAM
  if (h264_stream_) {
    fwrite(data, 1, size, h264_stream_);
  }
#endif

  // After the 4-byte Annex B start code, a NAL unit type of 7 (SPS) marks the
  // start of a key frame.
  if (size > 4 && (*(data + 4) & 0x1f) == 0x07) {
    // Key frame received
  }

  SBufferInfo sDstBufInfo;
  memset(&sDstBufInfo, 0, sizeof(SBufferInfo));

  int ret = openh264_decoder_->DecodeFrameNoDelay(
      data, (int)size, yuv420p_planes_, &sDstBufInfo);
  if (ret != 0) {
    LOG_ERROR("Failed to decode frame, error code: {}", ret);
    return -1;
  }

  frame_width_ = sDstBufInfo.UsrData.sSystemBuffer.iWidth;
  frame_height_ = sDstBufInfo.UsrData.sSystemBuffer.iHeight;
  yuv420p_frame_size_ = frame_width_ * frame_height_ * 3 / 2;
  nv12_frame_size_ = frame_width_ * frame_height_ * 3 / 2;

  // Grow the scratch buffers if the reported resolution exceeds their
  // current capacity.
  if (!yuv420p_frame_) {
    yuv420p_frame_capacity_ = yuv420p_frame_size_;
    yuv420p_frame_ = new unsigned char[yuv420p_frame_capacity_];
  }

  if (yuv420p_frame_capacity_ < yuv420p_frame_size_) {
    yuv420p_frame_capacity_ = yuv420p_frame_size_;
    delete[] yuv420p_frame_;
    yuv420p_frame_ = new unsigned char[yuv420p_frame_capacity_];
  }

  if (!nv12_frame_) {
    nv12_frame_capacity_ = nv12_frame_size_;
    nv12_frame_ = new unsigned char[nv12_frame_capacity_];
  }

  if (nv12_frame_capacity_ < nv12_frame_size_) {
    nv12_frame_capacity_ = nv12_frame_size_;
    delete[] nv12_frame_;
    nv12_frame_ = new unsigned char[nv12_frame_capacity_];
  }

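  // iBufferStatus == 1 means the decoder produced a complete picture for this
  // access unit; otherwise there is nothing to output yet.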
  if (sDstBufInfo.iBufferStatus == 1) {
    if (on_receive_decoded_frame) {
      // Note: sSystemBuffer.iStride has two entries; iStride[0] is the Y
      // stride and iStride[1] is the shared stride of the U and V planes.
      CopyYuvWithStride(
          yuv420p_planes_[0], yuv420p_planes_[1], yuv420p_planes_[2],
          sDstBufInfo.UsrData.sSystemBuffer.iWidth,
          sDstBufInfo.UsrData.sSystemBuffer.iHeight,
          sDstBufInfo.UsrData.sSystemBuffer.iStride[0],
          sDstBufInfo.UsrData.sSystemBuffer.iStride[1],
          sDstBufInfo.UsrData.sSystemBuffer.iStride[1], yuv420p_frame_);

      // In the packed I420 buffer the U plane starts at width * height and
      // the V plane at width * height * 5 / 4.
      libyuv::I420ToNV12(
          (const uint8_t *)yuv420p_frame_, frame_width_,
          (const uint8_t *)yuv420p_frame_ + frame_width_ * frame_height_,
          frame_width_ / 2,
          (const uint8_t *)yuv420p_frame_ +
              frame_width_ * frame_height_ * 5 / 4,
          frame_width_ / 2, (uint8_t *)nv12_frame_, frame_width_,
          (uint8_t *)nv12_frame_ + frame_width_ * frame_height_, frame_width_,
          frame_width_, frame_height_);

      DecodedFrame decoded_frame(nv12_frame_, nv12_frame_size_, frame_width_,
                                 frame_height_);
      decoded_frame.SetReceivedTimestamp(received_frame->ReceivedTimestamp());
      decoded_frame.SetCapturedTimestamp(received_frame->CapturedTimestamp());
      decoded_frame.SetDecodedTimestamp(clock_->CurrentTime());

#ifdef SAVE_DECODED_NV12_STREAM
      if (nv12_stream_) {
        fwrite(decoded_frame.Buffer(), 1, decoded_frame.Size(), nv12_stream_);
      }
#endif
      on_receive_decoded_frame(decoded_frame);
    }
  }

  return 0;
}