Remove the temporal delimiter OBU during transmission

dijunkun
2024-05-09 16:53:56 +08:00
parent 6bc8aaabdc
commit c8d21794f5
6 changed files with 33 additions and 28 deletions
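The AV1 RTP payload format expects temporal delimiter OBUs to be stripped before packetization (tile list OBUs are likewise not meant to go over RTP, and padding OBUs carry nothing useful), which is the filtering this commit enables in ParseObus below. A minimal sketch of that idea, using a hypothetical Obu record purely for illustration (the type values come from the AV1 bitstream spec):

#include <cstdint>
#include <vector>

// OBU type values as defined by the AV1 bitstream specification.
constexpr int kObuTypeTemporalDelimiter = 2;
constexpr int kObuTypeTileList = 8;
constexpr int kObuTypePadding = 15;

// Hypothetical parsed-OBU record, for illustration only.
struct Obu {
  int type;
  std::vector<uint8_t> payload;
};

// Keep only the OBUs that should be sent over RTP.
std::vector<Obu> FilterObusForRtp(const std::vector<Obu>& obus) {
  std::vector<Obu> result;
  for (const Obu& obu : obus) {
    if (obu.type != kObuTypeTemporalDelimiter && obu.type != kObuTypeTileList &&
        obu.type != kObuTypePadding) {
      result.push_back(obu);  // everything else goes on the wire
    }
  }
  return result;
}

With the temporal delimiter gone, every RTP packet of a frame carries actual frame data, which is what allows the receiver-side completeness check further down to be simplified.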


@@ -95,14 +95,13 @@ std::vector<Obu> ParseObus(uint8_t* payload, int payload_size) {
     // Skip obus that shouldn't be transfered over rtp.
     int obu_type = ObuType(obu.header_);
-    // if (obu_type != kObuTypeTemporalDelimiter &&  //
-    //     obu_type != kObuTypeTileList &&           //
-    //     obu_type != kObuTypePadding) {
-    //   result.push_back(obu);
-    // }
-    if (1) {
+    if (obu_type != kObuTypeTemporalDelimiter && obu_type != kObuTypeTileList &&
+        obu_type != kObuTypePadding) {
       result.push_back(obu);
     }
+    // if (1) {
+    //   result.push_back(obu);
+    // }
   }
   // for (int i = 0; i < result.size(); i++) {


@@ -17,7 +17,7 @@ RtpCodec ::RtpCodec(RtpPacket::PAYLOAD_TYPE payload_type)
       has_padding_(false),
       has_extension_(false),
       payload_type_(payload_type),
-      sequence_number_(0) {
+      sequence_number_(1) {
   fec_encoder_.Init();
 }
@@ -541,7 +541,7 @@ void RtpCodec::Encode(VideoFrameType frame_type, uint8_t* buffer, size_t size,
rtp_packet.SetAv1AggrHeader(0, 0, 1, 0);
rtp_packet.EncodeAv1(obus[i].data_, obus[i].size_);
// LOG_ERROR("enc payload size = {}", rtp_packet.PayloadSize());
packets.emplace_back(rtp_packet);
} else {
size_t last_packet_size = obus[i].size_ % MAX_NALU_LEN;
@@ -583,7 +583,6 @@ void RtpCodec::Encode(VideoFrameType frame_type, uint8_t* buffer, size_t size,
MAX_NALU_LEN);
}
// LOG_ERROR("enc payload size = {}", rtp_packet.PayloadSize());
packets.emplace_back(rtp_packet);
}
}


@@ -2,8 +2,6 @@
#include <string>
#include "log.h"
void RtpPacket::TryToDecodeRtpPacket() {
if (PAYLOAD_TYPE::H264 == PAYLOAD_TYPE(buffer_[1] & 0x7F)) {
nal_unit_type_ = NAL_UNIT_TYPE(buffer_[12] & 0x1F);


@@ -7,6 +7,8 @@
#include <vector>
#include "log.h"
// Common
// 0 1 2 3
// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
@@ -350,14 +352,30 @@ class RtpPacket {
     ParseRtpData();
     int z, y, w, n;
     GetAv1AggrHeader(z, y, w, n);
-    return !z && !y;
+    // return !z && !y;
+    if (z == 0 && y == 0 && w == 1) {
+      return true;
+    } else if (z == 0 && y == 1 && w == 1) {
+      return true;
+    } else {
+      return false;
+    }
   }
   bool Av1FrameEnd() {
     ParseRtpData();
     int z, y, w, n;
     GetAv1AggrHeader(z, y, w, n);
-    return z && !y;
+    // return z && !y;
+    if (z == 0 && y == 0 && w == 1) {
+      return true;
+    } else if (z == 1 && y == 0 && w == 1) {
+      return true;
+    } else {
+      return false;
+    }
   }
  private:
@@ -372,7 +390,7 @@ class RtpPacket {
   uint32_t total_csrc_number_ = 0;
   bool marker_ = false;
   uint32_t payload_type_ = 0;
-  uint16_t sequence_number_ = 0;
+  uint16_t sequence_number_ = 1;
   uint32_t timestamp_ = 0;
   uint32_t ssrc_ = 0;
   std::vector<uint32_t> csrcs_;
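Av1FrameStart() and Av1FrameEnd() above key off the AV1 aggregation header, the first byte of the RTP payload. Assuming GetAv1AggrHeader() follows the standard layout of that byte, a sketch of the fields and how they would be extracted (the struct and helper names are illustrative, not the project's API):

#include <cstdint>

// AV1 aggregation header (first payload byte):
//  0 1 2 3 4 5 6 7
// +-+-+-+-+-+-+-+-+
// |Z|Y| W |N|-|-|-|
// +-+-+-+-+-+-+-+-+
struct Av1AggrHeader {
  int z;  // 1: first OBU element continues a fragment from the previous packet
  int y;  // 1: last OBU element continues in the next packet
  int w;  // OBU element count (0 means "any", each element carries a length)
  int n;  // 1: first packet of a coded video sequence
};

inline Av1AggrHeader ParseAv1AggrHeader(uint8_t first_byte) {
  Av1AggrHeader h;
  h.z = (first_byte >> 7) & 0x01;
  h.y = (first_byte >> 6) & 0x01;
  h.w = (first_byte >> 4) & 0x03;
  h.n = (first_byte >> 3) & 0x01;
  return h;
}

Read this way, Z == 0 means the packet begins with a fresh OBU (a candidate frame start) and Y == 0 means its last OBU is not continued (a candidate frame end); the extra W == 1 condition in the new checks mirrors the sender side, which packs one OBU element per packet via SetAv1AggrHeader(0, 0, 1, 0).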


@@ -241,11 +241,7 @@ bool RtpVideoReceiver::CheckIsH264FrameCompleted(RtpPacket& rtp_packet) {
 bool RtpVideoReceiver::CheckIsAv1FrameCompleted(RtpPacket& rtp_packet) {
   if (rtp_packet.Av1FrameEnd()) {
     uint16_t end_seq = rtp_packet.SequenceNumber();
-    if (incomplete_frame_list_.size() == end_seq) {
-      return true;
-    }
-    size_t start = rtp_packet.SequenceNumber();
+    size_t start = end_seq;
-    bool start_count = 0;
     while (end_seq--) {
       auto it = incomplete_frame_list_.find(end_seq);
@@ -258,19 +254,14 @@ bool RtpVideoReceiver::CheckIsAv1FrameCompleted(RtpPacket& rtp_packet) {
         continue;
       } else if (it->second.Av1FrameStart()) {
         start = it->second.SequenceNumber();
-        // skip temporal delimiter OBU
-        start_count++;
-        if (start_count == 1)
-          break;
-        else
-          break;
+        break;
       } else {
         LOG_WARN("What happened?")
         return false;
       }
     }
-    if (start != rtp_packet.SequenceNumber()) {
+    if (start <= rtp_packet.SequenceNumber()) {
       if (!nv12_data_) {
         nv12_data_ = new uint8_t[NV12_BUFFER_SIZE];
       }
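The receiver-side check that this last hunk simplifies walks backwards from the packet that ends a frame: every sequence number on the way must already be in incomplete_frame_list_, and the walk must reach a packet that starts a frame. Since the temporal delimiter OBU is no longer transmitted, there is only one start packet to look for, so the start_count bookkeeping can go. A rough sketch of the simplified logic, assuming a map from sequence number to per-packet flags and ignoring sequence-number wrap-around:

#include <cstdint>
#include <map>

struct PacketFlags {
  bool frame_start;  // Av1FrameStart(): Z == 0
  bool frame_end;    // Av1FrameEnd():   Y == 0
};

// Assumes the frame-end packet itself is already present in `received`.
bool IsAv1FrameComplete(const std::map<uint16_t, PacketFlags>& received,
                        uint16_t end_seq) {
  for (uint16_t seq = end_seq;; --seq) {
    auto it = received.find(seq);
    if (it == received.end()) {
      return false;  // a packet belonging to this frame is still missing
    }
    if (it->second.frame_start) {
      return true;  // reached the frame's first packet with no gaps
    }
    if (seq == 0) {
      return false;  // sketch only: give up instead of wrapping around
    }
  }
}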