Fix blurred screen caused by YUV420P-to-NV12 conversion error

This commit is contained in:
dijunkun
2024-05-08 14:27:57 +08:00
parent 114c80cd72
commit e0d2ab5a9f
5 changed files with 98 additions and 22 deletions
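For context on what this fix touches: YUV420P stores three separate planes (full-resolution Y, then quarter-resolution U and V), while NV12 stores one Y plane followed by a single interleaved UV plane. Copying planar chroma straight into an NV12 buffer scrambles the chroma, which typically shows up as a blurred or wrongly colored picture. Below is a minimal sketch of the two layouts for a 1280x720 8-bit frame; it is illustrative only and not part of the commit.

#include <cstdio>

int main() {
  const int W = 1280, H = 720;

  // YUV420P: three consecutive planes, Y then U then V.
  const int y_size = W * H;              // 921600 bytes of luma
  const int u_size = (W / 2) * (H / 2);  // 230400 bytes of U
  const int v_size = u_size;             // 230400 bytes of V
  std::printf("YUV420P: Y@0  U@%d  V@%d  total=%d\n",
              y_size, y_size + u_size, y_size + u_size + v_size);

  // NV12: one Y plane, then one interleaved UV plane (U0 V0 U1 V1 ...).
  const int uv_size = u_size + v_size;   // 460800 bytes of interleaved chroma
  std::printf("NV12:    Y@0  UV@%d        total=%d\n",
              y_size, y_size + uv_size);

  // Both layouts occupy W * H * 3 / 2 bytes, matching the
  // 1280 * 720 * 3 / 2 buffers allocated in the decoder below.
  return 0;
}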

View File

@@ -10,6 +10,30 @@ extern "C" {
#include <libswscale/swscale.h>
};
static int YUV420PToNV12FFmpeg(unsigned char *src_buffer, int width, int height,
unsigned char *dst_buffer) {
AVFrame *Input_pFrame = av_frame_alloc();
AVFrame *Output_pFrame = av_frame_alloc();
struct SwsContext *img_convert_ctx = sws_getContext(
width, height, AV_PIX_FMT_YUV420P, 1280, 720, AV_PIX_FMT_NV12,
SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
av_image_fill_arrays(Input_pFrame->data, Input_pFrame->linesize, src_buffer,
AV_PIX_FMT_YUV420P, width, height, 1);
av_image_fill_arrays(Output_pFrame->data, Output_pFrame->linesize, dst_buffer,
AV_PIX_FMT_NV12, 1280, 720, 1);
sws_scale(img_convert_ctx, (uint8_t const **)Input_pFrame->data,
Input_pFrame->linesize, 0, height, Output_pFrame->data,
Output_pFrame->linesize);
if (Input_pFrame) av_free(Input_pFrame);
if (Output_pFrame) av_free(Output_pFrame);
if (img_convert_ctx) sws_freeContext(img_convert_ctx);
return 0;
}
class ScopedDav1dPicture : public std::shared_ptr<ScopedDav1dPicture> {
public:
~ScopedDav1dPicture() { dav1d_picture_unref(&picture_); }
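As a point of comparison for the YUV420PToNV12FFmpeg() helper added above, here is a minimal libswscale-based sketch that derives the destination size from the source instead of hardcoding 1280x720, and that releases the frames with av_frame_free(). It is an illustrative variant, not the committed implementation, and it assumes <libavutil/frame.h> and <libavutil/imgutils.h> are available for av_frame_alloc()/av_image_fill_arrays().

extern "C" {
#include <libavutil/frame.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
};

// Illustrative variant: convert a tightly packed YUV420P buffer into an NV12
// buffer of the same width and height. Returns 0 on success, -1 on failure.
static int ConvertYUV420PToNV12(const unsigned char *src, unsigned char *dst,
                                int width, int height) {
  AVFrame *in = av_frame_alloc();
  AVFrame *out = av_frame_alloc();
  struct SwsContext *ctx = sws_getContext(
      width, height, AV_PIX_FMT_YUV420P, width, height, AV_PIX_FMT_NV12,
      SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
  int ret = -1;
  if (in && out && ctx) {
    av_image_fill_arrays(in->data, in->linesize, src, AV_PIX_FMT_YUV420P,
                         width, height, 1);
    av_image_fill_arrays(out->data, out->linesize, dst, AV_PIX_FMT_NV12,
                         width, height, 1);
    sws_scale(ctx, in->data, in->linesize, 0, height, out->data,
              out->linesize);
    ret = 0;
  }
  if (ctx) sws_freeContext(ctx);
  av_frame_free(&in);   // safe to call with a null frame
  av_frame_free(&out);
  return ret;
}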
@@ -42,9 +66,14 @@ Dav1dAv1Decoder::~Dav1dAv1Decoder() {
file_ = nullptr;
}
if (decoded_frame_) {
delete decoded_frame_;
decoded_frame_ = nullptr;
if (decoded_frame_yuv_) {
delete decoded_frame_yuv_;
decoded_frame_yuv_ = nullptr;
}
if (decoded_frame_nv12_) {
delete decoded_frame_nv12_;
decoded_frame_nv12_ = nullptr;
}
}
@@ -62,20 +91,35 @@ int Dav1dAv1Decoder::Init() {
LOG_ERROR("Dav1d AV1 decoder open failed");
}
decoded_frame_ = new VideoFrame(1280 * 720 * 3 / 2);
decoded_frame_yuv_ = new VideoFrame(1280 * 720 * 3 / 2);
decoded_frame_nv12_ = new VideoFrame(1280 * 720 * 3 / 2);
if (SAVE_DECODER_STREAM) {
file_ = fopen("decode_stream.yuv", "w+b");
file_ = fopen("decode_stream.ivf", "w+b");
if (!file_) {
LOG_WARN("Fail to open stream.yuv");
LOG_WARN("Fail to open stream.ivf");
}
}
return 0;
}
void YUV420PtoNV12(unsigned char *SrcY, unsigned char *SrcU,
unsigned char *SrcV, unsigned char *Dst, int Width,
int Height) {
memcpy(Dst, SrcY, Width * Height);
unsigned char *DstU = Dst + Width * Height;
for (int i = 0; i < Width * Height / 4; i++) {
(*DstU++) = (*SrcU++);
(*DstU++) = (*SrcV++);
}
}
int Dav1dAv1Decoder::Decode(
const uint8_t *data, int size,
std::function<void(VideoFrame)> on_receive_decoded_frame) {
// if (SAVE_DECODER_STREAM) {
// fwrite((unsigned char *)data, 1, size, file_);
// }
ScopedDav1dData scoped_dav1d_data;
Dav1dData &dav1d_data = scoped_dav1d_data.Data();
dav1d_data_wrap(&dav1d_data, data, size,
@@ -105,16 +149,44 @@ int Dav1dAv1Decoder::Decode(
return -1;
}
memcpy(decoded_frame_->GetBuffer(), dav1d_picture.data[0],
dav1d_picture.p.w * dav1d_picture.p.h);
memcpy(decoded_frame_->GetBuffer() + dav1d_picture.p.w * dav1d_picture.p.h,
dav1d_picture.data[1], dav1d_picture.p.w * dav1d_picture.p.h / 2);
LOG_INFO("dav1d decode size {}", decoded_frame_->Size());
on_receive_decoded_frame(*decoded_frame_);
if (SAVE_DECODER_STREAM) {
fwrite((unsigned char *)decoded_frame_->Buffer(), 1, decoded_frame_->Size(),
file_);
uint32_t start_ts = static_cast<uint32_t>(
std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::high_resolution_clock::now().time_since_epoch())
.count());
if (1) {
YUV420PtoNV12((unsigned char *)dav1d_picture.data[0],
(unsigned char *)dav1d_picture.data[1],
(unsigned char *)dav1d_picture.data[2],
decoded_frame_nv12_->GetBuffer(), dav1d_picture.p.w,
dav1d_picture.p.h);
} else {
memcpy(decoded_frame_yuv_->GetBuffer(), dav1d_picture.data[0],
dav1d_picture.p.w * dav1d_picture.p.h);
memcpy(
decoded_frame_yuv_->GetBuffer() + dav1d_picture.p.w * dav1d_picture.p.h,
dav1d_picture.data[1], dav1d_picture.p.w * dav1d_picture.p.h / 4);
memcpy(decoded_frame_yuv_->GetBuffer() +
dav1d_picture.p.w * dav1d_picture.p.h * 5 / 4,
dav1d_picture.data[2], dav1d_picture.p.w * dav1d_picture.p.h / 4);
YUV420PToNV12FFmpeg(decoded_frame_yuv_->GetBuffer(), dav1d_picture.p.w,
dav1d_picture.p.h, decoded_frame_nv12_->GetBuffer());
}
uint32_t end_ts = static_cast<uint32_t>(
std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::high_resolution_clock::now().time_since_epoch())
.count());
LOG_ERROR("decode time = {}", end_ts - start_ts);
on_receive_decoded_frame(*decoded_frame_nv12_);
// if (SAVE_DECODER_STREAM) {
// fwrite((unsigned char *)decoded_frame_->Buffer(), 1,
// decoded_frame_->Size(),
// file_);
// }
return 0;
}
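One note on the conversion paths above: YUV420PtoNV12() and the memcpy-based fallback treat the dav1d planes as tightly packed, i.e. they assume each row stride equals the visible width. Dav1dPicture reports per-plane strides (stride[0] for luma, stride[1] shared by both chroma planes), so a stride-aware copy is sketched below. CopyPlaneRows() and Dav1dPlanesToNV12() are hypothetical helpers invented for this sketch, not part of the commit, and the sketch assumes 8-bit 4:2:0 output.

#include <cstddef>
#include <cstring>

// Hypothetical helper: copy `rows` rows of `row_bytes` bytes each from a
// plane with row stride `src_stride` into a tightly packed destination.
// Returns the advanced destination pointer.
static unsigned char *CopyPlaneRows(const unsigned char *src,
                                    std::ptrdiff_t src_stride, int row_bytes,
                                    int rows, unsigned char *dst) {
  for (int y = 0; y < rows; ++y) {
    std::memcpy(dst, src, row_bytes);
    src += src_stride;
    dst += row_bytes;
  }
  return dst;
}

// Hypothetical sketch: build an NV12 buffer (Y plane followed by interleaved
// UV) from three planar sources, honoring the luma and chroma strides. For a
// Dav1dPicture `pic` this could be called as
//   Dav1dPlanesToNV12((unsigned char *)pic.data[0],
//                     (unsigned char *)pic.data[1],
//                     (unsigned char *)pic.data[2], pic.stride[0],
//                     pic.stride[1], pic.p.w, pic.p.h, dst);
static void Dav1dPlanesToNV12(const unsigned char *y, const unsigned char *u,
                              const unsigned char *v, std::ptrdiff_t y_stride,
                              std::ptrdiff_t uv_stride, int width, int height,
                              unsigned char *dst) {
  dst = CopyPlaneRows(y, y_stride, width, height, dst);
  for (int row = 0; row < height / 2; ++row) {
    const unsigned char *u_row = u + row * uv_stride;
    const unsigned char *v_row = v + row * uv_stride;
    for (int col = 0; col < width / 2; ++col) {
      *dst++ = u_row[col];  // NV12 interleaves U and V byte by byte
      *dst++ = v_row[col];
    }
  }
}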

View File

@@ -46,7 +46,8 @@ class Dav1dAv1Decoder : public VideoDecoder {
AVFrame *frame_nv12_ = nullptr;
struct SwsContext *img_convert_ctx = nullptr;
VideoFrame *decoded_frame_ = nullptr;
VideoFrame *decoded_frame_yuv_ = nullptr;
VideoFrame *decoded_frame_nv12_ = nullptr;
FILE *file_ = nullptr;
bool first_ = false;

View File

@@ -99,7 +99,6 @@ int PeerConnection::Init(PeerConnectionParams params,
(uint8_t *)data, size,
[this, user_id, user_id_size](VideoFrame video_frame) {
if (on_receive_video_buffer_) {
LOG_ERROR("Receive video, size {}", video_frame.Size());
on_receive_video_buffer_((const char *)video_frame.Buffer(),
video_frame.Size(), user_id, user_id_size);
}
@@ -531,7 +530,7 @@ int PeerConnection::SendVideoData(const char *data, size_t size) {
[this](char *encoded_frame, size_t size,
VideoEncoder::VideoFrameType frame_type) -> int {
for (auto &ice_trans : ice_transmission_list_) {
LOG_ERROR("Send frame size: [{}]", size);
// LOG_ERROR("Send frame size: [{}]", size);
// ice_trans.second->SendData(IceTransmission::DATA_TYPE::VIDEO,
// encoded_frame, size);
ice_trans.second->SendVideoData(

View File

@@ -246,20 +246,24 @@ bool RtpVideoReceiver::CheckIsAv1FrameCompleted(RtpPacket& rtp_packet) {
}
size_t start = rtp_packet.SequenceNumber();
int start_count = 0;
while (end_seq--) {
auto it = incomplete_frame_list_.find(end_seq);
if (it == incomplete_frame_list_.end()) {
// The last fragment has already been received. If all fragments are in
// order, then some fragments were lost in transmission and need to be
// repaired using FEC.
return false;
// return false;
} else if (!it->second.Av1FrameStart()) {
continue;
} else if (it->second.Av1FrameStart()) {
start = it->second.SequenceNumber();
// skip temporal delimiter OBU
break;
start_count++;
break;
} else {
LOG_WARN("What happened?")
return false;
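To summarize the scan that the hunk above modifies: starting from a known end sequence number, the receiver walks backwards through incomplete_frame_list_ until it finds the fragment flagged as the AV1 frame start. The sketch below shows one way such a backward scan can be structured; in this sketch a missing fragment reports the frame as not yet assemblable, whereas the hunk above comments that early return out so the scan keeps going. FindAv1FrameStart and RtpPacketStub are invented for the example (not the project's classes), std::map keyed by sequence number is an assumption, and sequence-number wrap-around is ignored.

#include <cstdint>
#include <map>
#include <optional>

// Stub standing in for the project's RtpPacket, for illustration only.
struct RtpPacketStub {
  bool av1_frame_start = false;
};

// Hypothetical helper: walk backwards from `end_seq` through the received
// fragments and return the sequence number of the frame-start fragment, or
// std::nullopt if a fragment is missing before the start marker is found.
std::optional<std::uint16_t> FindAv1FrameStart(
    const std::map<std::uint16_t, RtpPacketStub> &incomplete_frame_list,
    std::uint16_t end_seq) {
  for (std::uint16_t seq = end_seq;; --seq) {
    auto it = incomplete_frame_list.find(seq);
    if (it == incomplete_frame_list.end()) {
      return std::nullopt;  // gap: the frame cannot be assembled yet
    }
    if (it->second.av1_frame_start) {
      return seq;  // first fragment of the frame
    }
    if (seq == 0) {
      break;  // no wrap-around handling in this sketch
    }
  }
  return std::nullopt;
}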

View File

@@ -62,7 +62,7 @@ int IceTransmission::InitIceTransmission(
});
rtp_video_receiver_->SetOnReceiveCompleteFrame(
[this](VideoFrame &video_frame) -> void {
LOG_ERROR("OnReceiveCompleteFrame {}", video_frame.Size());
// LOG_ERROR("OnReceiveCompleteFrame {}", video_frame.Size());
on_receive_video_((const char *)video_frame.Buffer(),
video_frame.Size(), remote_user_id_.data(),
remote_user_id_.size());