Mirror of https://github.com/kunkundi/crossdesk.git (synced 2025-10-26 20:25:34 +08:00)

Commit: [fix] fix all unused variables and type conversions
@@ -12,7 +12,7 @@ VideoFrame::VideoFrame(size_t size) {
height_ = 0;
}

VideoFrame::VideoFrame(size_t size, size_t width, size_t height) {
VideoFrame::VideoFrame(size_t size, uint32_t width, uint32_t height) {
buffer_ = new uint8_t[size];
size_ = size;
width_ = width;

@@ -27,8 +27,8 @@ VideoFrame::VideoFrame(const uint8_t *buffer, size_t size) {
height_ = 0;
}

VideoFrame::VideoFrame(const uint8_t *buffer, size_t size, size_t width,
size_t height) {
VideoFrame::VideoFrame(const uint8_t *buffer, size_t size, uint32_t width,
uint32_t height) {
buffer_ = new uint8_t[size];
memcpy(buffer_, buffer, size);
size_ = size;

@@ -14,9 +14,10 @@ class VideoFrame {
public:
VideoFrame();
VideoFrame(size_t size);
VideoFrame(size_t size, size_t width, size_t height);
VideoFrame(size_t size, uint32_t width, uint32_t height);
VideoFrame(const uint8_t *buffer, size_t size);
VideoFrame(const uint8_t *buffer, size_t size, size_t width, size_t height);
VideoFrame(const uint8_t *buffer, size_t size, uint32_t width,
uint32_t height);
VideoFrame(const VideoFrame &video_frame);
VideoFrame(VideoFrame &&video_frame);
VideoFrame &operator=(const VideoFrame &video_frame);

@@ -27,18 +28,18 @@ class VideoFrame {
public:
const uint8_t *Buffer() { return buffer_; }
size_t Size() { return size_; }
size_t Width() { return width_; }
size_t Height() { return height_; }
uint32_t Width() { return width_; }
uint32_t Height() { return height_; }

void SetSize(size_t size) { size_ = size; }
void SetWidth(size_t width) { width_ = width; }
void SetHeight(size_t height) { height_ = height; }
void SetWidth(uint32_t width) { width_ = width; }
void SetHeight(uint32_t height) { height_ = height; }

private:
uint8_t *buffer_ = nullptr;
size_t size_ = 0;
size_t width_ = 0;
size_t height_ = 0;
uint32_t width_ = 0;
uint32_t height_ = 0;
};

#endif
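Aside (not part of this commit): the VideoFrame hunks above move the width/height interface from size_t to uint32_t. A minimal sketch, using hypothetical names, of why the old mixed-width code produced conversion warnings on 64-bit builds:

    #include <cstddef>
    #include <cstdint>

    struct FrameDims {
      uint32_t width_ = 0;   // 32-bit storage, as in the new VideoFrame
      uint32_t height_ = 0;

      void Set(size_t width, size_t height) {
        // size_t is 64-bit on most desktop targets, so these assignments
        // narrow; an explicit cast states the intent instead of warning.
        width_ = static_cast<uint32_t>(width);
        height_ = static_cast<uint32_t>(height);
      }
    };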
@@ -190,11 +190,10 @@ int IceAgent::CreateIceAgent(nice_cb_state_changed_t on_state_changed,
return 0;
}

void cb_closed(GObject *src, GAsyncResult *res, gpointer data) {
NiceAgent *agent = NICE_AGENT(src);
g_debug("test-turn:%s: %p", G_STRFUNC, agent);

*((gboolean *)data) = TRUE;
void cb_closed(GObject *src, [[maybe_unused]] GAsyncResult *res,
[[maybe_unused]] gpointer data) {
[[maybe_unused]] NiceAgent *agent = NICE_AGENT(src);
LOG_INFO("Nice agent closed");
}

int IceAgent::DestroyIceAgent() {

@@ -384,11 +383,11 @@ int IceAgent::Send(const char *data, size_t size) {
// return -1;
// }

int ret = nice_agent_send(agent_, stream_id_, 1, size, data);
bool ret = nice_agent_send(agent_, stream_id_, 1, (guint)size, data);

#ifdef SAVE_IO_STREAM
fwrite(data, 1, size, file_out_);
#endif

return 0;
return ret ? 0 : -1;
}
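Aside (not part of this commit): the rewritten cb_closed keeps the parameters required by the asynchronous close callback but marks them with the C++17 [[maybe_unused]] attribute so the compiler stops warning about them. A small illustrative sketch with hypothetical names:

    // Keeping a required callback signature while silencing
    // unused-parameter warnings (C++17).
    void on_closed([[maybe_unused]] void *source,
                   [[maybe_unused]] void *result,
                   [[maybe_unused]] void *user_data) {
      // The body deliberately ignores its arguments.
    }

The Send() hunk makes a related change at the call site: the result of nice_agent_send is now checked and mapped to 0 on success or -1 on failure instead of being discarded.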
@@ -6,12 +6,20 @@ std::shared_ptr<spdlog::logger> get_logger() {
}

auto now = std::chrono::system_clock::now() + std::chrono::hours(8);
auto timet = std::chrono::system_clock::to_time_t(now);
auto localTime = *std::gmtime(&timet);
auto now_time = std::chrono::system_clock::to_time_t(now);

std::tm tm_info;

#ifdef _WIN32
gmtime_s(&tm_info, &now_time);
#else
std::gmtime_r(&now_time, &tm_info);
#endif

std::stringstream ss;
std::string filename;
ss << LOGGER_NAME;
ss << std::put_time(&localTime, "-%Y%m%d-%H%M%S.log");
ss << std::put_time(&tm_info, "-%Y%m%d-%H%M%S.log");
ss >> filename;

std::string path = "logs/" + filename;
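Aside (not part of this commit): the logger hunk replaces std::gmtime, which returns a pointer to shared static storage, with the re-entrant platform variants and a caller-owned std::tm. A standalone sketch of the same pattern:

    #include <chrono>
    #include <ctime>

    std::tm utc_now() {
      std::time_t t = std::chrono::system_clock::to_time_t(
          std::chrono::system_clock::now());
      std::tm out{};
    #ifdef _WIN32
      gmtime_s(&out, &t);   // MSVC CRT variant: (destination, source)
    #else
      gmtime_r(&t, &out);   // POSIX variant: (source, destination)
    #endif
      return out;           // copy owned by the caller, no shared buffer
    }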
@@ -36,11 +36,11 @@ int AudioDecoder::Init() {
}

int AudioDecoder::Decode(
const uint8_t* data, int size,
const uint8_t* data, size_t size,
std::function<void(uint8_t*, int)> on_receive_decoded_frame) {
// LOG_ERROR("input opus size = {}", size);
auto frame_size =
opus_decode(opus_decoder_, data, size, out_data, MAX_FRAME_SIZE, 0);
auto frame_size = opus_decode(opus_decoder_, data, (opus_int32)size, out_data,
MAX_FRAME_SIZE, 0);

if (frame_size < 0) {
LOG_ERROR("Decode opus frame failed");

@@ -26,7 +26,7 @@ class AudioDecoder {
public:
int Init();

int Decode(const uint8_t *data, int size,
int Decode(const uint8_t *data, size_t size,
std::function<void(uint8_t *, int)> on_receive_decoded_frame);

std::string GetDecoderName() { return "Opus"; }

@@ -52,7 +52,7 @@ int AudioEncoder::Init() {
}

int AudioEncoder::Encode(
const uint8_t *data, int size,
const uint8_t *data, size_t size,
std::function<int(char *encoded_audio_buffer, size_t size)>
on_encoded_audio_buffer) {
if (!on_encoded_audio_buffer_) {

@@ -67,7 +67,7 @@ int AudioEncoder::Encode(
// printf("1 Time cost: %d size: %d\n", now_ts - last_ts, size);
// last_ts = now_ts;

auto ret = opus_encode(opus_encoder_, (opus_int16 *)data, size, out_data,
auto ret = opus_encode(opus_encoder_, (opus_int16 *)data, (int)size, out_data,
MAX_PACKET_SIZE);
if (ret < 0) {
printf("opus decode failed, %d\n", ret);

@@ -76,15 +76,7 @@ int AudioEncoder::Encode(

if (on_encoded_audio_buffer_) {
on_encoded_audio_buffer_((char *)out_data, ret);
} else {
OnEncodedAudioBuffer((char *)out_data, ret);
}

return 0;
}

int AudioEncoder::OnEncodedAudioBuffer(char *encoded_audio_buffer,
size_t size) {
LOG_INFO("OnEncodedAudioBuffer not implemented");
return 0;
}

@@ -23,12 +23,10 @@ class AudioEncoder {
public:
int Init();

int Encode(const uint8_t* data, int size,
int Encode(const uint8_t* data, size_t size,
std::function<int(char* encoded_audio_buffer, size_t size)>
on_encoded_audio_buffer);

int OnEncodedAudioBuffer(char* encoded_audio_buffer, size_t size);

std::string GetEncoderName() { return "Opus"; }

private:
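Aside (not part of this commit): opus_decode and opus_encode take their input length as a 32-bit integer, so once the Decode/Encode signatures use size_t the length must be narrowed at the call site, which is what the (opus_int32) and (int) casts above do. A hedged sketch of a guarded narrowing helper one could use instead of a bare cast:

    #include <cstddef>
    #include <cstdint>
    #include <limits>

    // Narrow a size_t byte count to the 32-bit length the codec API expects,
    // refusing values that would silently truncate.
    inline int32_t checked_len(size_t n) {
      constexpr size_t kMax =
          static_cast<size_t>(std::numeric_limits<int32_t>::max());
      return n <= kMax ? static_cast<int32_t>(n) : -1;  // -1 = too large
    }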
@@ -27,230 +27,232 @@
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <iostream>
|
||||
#include <fstream>
|
||||
#include <string>
|
||||
#include <sstream>
|
||||
#include <mutex>
|
||||
#include <time.h>
|
||||
|
||||
#include <fstream>
|
||||
#include <iostream>
|
||||
#include <mutex>
|
||||
#include <sstream>
|
||||
#include <string>
|
||||
|
||||
#ifdef _WIN32
|
||||
#include <winsock.h>
|
||||
#include <windows.h>
|
||||
#include <winsock.h>
|
||||
|
||||
#pragma comment(lib, "ws2_32.lib")
|
||||
#undef ERROR
|
||||
#else
|
||||
#include <unistd.h>
|
||||
#include <sys/socket.h>
|
||||
#include <netinet/in.h>
|
||||
#include <arpa/inet.h>
|
||||
#include <netinet/in.h>
|
||||
#include <sys/socket.h>
|
||||
#include <unistd.h>
|
||||
|
||||
#define SOCKET int
|
||||
#define INVALID_SOCKET -1
|
||||
#endif
|
||||
|
||||
enum LogLevel {
|
||||
TRACE,
|
||||
INFO,
|
||||
WARNING,
|
||||
ERROR,
|
||||
FATAL
|
||||
};
|
||||
enum LogLevel { TRACE, INFO, WARNING, ERROR, FATAL };
|
||||
|
||||
namespace simplelogger{
|
||||
namespace simplelogger {
|
||||
class Logger {
|
||||
public:
|
||||
Logger(LogLevel level, bool bPrintTimeStamp) : level(level), bPrintTimeStamp(bPrintTimeStamp) {}
|
||||
virtual ~Logger() {}
|
||||
virtual std::ostream& GetStream() = 0;
|
||||
virtual void FlushStream() {}
|
||||
bool ShouldLogFor(LogLevel l) {
|
||||
return l >= level;
|
||||
public:
|
||||
Logger(LogLevel level, bool bPrintTimeStamp)
|
||||
: level(level), bPrintTimeStamp(bPrintTimeStamp) {}
|
||||
virtual ~Logger() {}
|
||||
virtual std::ostream &GetStream() = 0;
|
||||
virtual void FlushStream() {}
|
||||
bool ShouldLogFor(LogLevel l) { return l >= level; }
|
||||
char *GetLead(LogLevel l, [[maybe_unused]] const char *szFile,
|
||||
[[maybe_unused]] int nLine,
|
||||
[[maybe_unused]] const char *szFunc) {
|
||||
if (l < TRACE || l > FATAL) {
|
||||
sprintf(szLead, "[?????] ");
|
||||
return szLead;
|
||||
}
|
||||
char* GetLead(LogLevel l, const char *szFile, int nLine, const char *szFunc) {
|
||||
if (l < TRACE || l > FATAL) {
|
||||
sprintf(szLead, "[?????] ");
|
||||
return szLead;
|
||||
}
|
||||
const char *szLevels[] = {"TRACE", "INFO", "WARN", "ERROR", "FATAL"};
|
||||
if (bPrintTimeStamp) {
|
||||
time_t t = time(NULL);
|
||||
struct tm *ptm = localtime(&t);
|
||||
sprintf(szLead, "[%-5s][%02d:%02d:%02d] ",
|
||||
szLevels[l], ptm->tm_hour, ptm->tm_min, ptm->tm_sec);
|
||||
} else {
|
||||
sprintf(szLead, "[%-5s] ", szLevels[l]);
|
||||
}
|
||||
return szLead;
|
||||
|
||||
const char *szLevels[] = {"TRACE", "INFO", "WARN", "ERROR", "FATAL"};
|
||||
if (bPrintTimeStamp) {
|
||||
time_t t = time(NULL);
|
||||
struct tm *ptm = localtime(&t);
|
||||
sprintf(szLead, "[%-5s][%02d:%02d:%02d] ", szLevels[l], ptm->tm_hour,
|
||||
ptm->tm_min, ptm->tm_sec);
|
||||
} else {
|
||||
sprintf(szLead, "[%-5s] ", szLevels[l]);
|
||||
}
|
||||
void EnterCriticalSection() {
|
||||
mtx.lock();
|
||||
}
|
||||
void LeaveCriticalSection() {
|
||||
mtx.unlock();
|
||||
}
|
||||
private:
|
||||
LogLevel level;
|
||||
char szLead[80];
|
||||
bool bPrintTimeStamp;
|
||||
std::mutex mtx;
|
||||
return szLead;
|
||||
}
|
||||
void EnterCriticalSection() { mtx.lock(); }
|
||||
void LeaveCriticalSection() { mtx.unlock(); }
|
||||
|
||||
private:
|
||||
LogLevel level;
|
||||
char szLead[80];
|
||||
bool bPrintTimeStamp;
|
||||
std::mutex mtx;
|
||||
};
|
||||
|
||||
class LoggerFactory {
|
||||
public:
|
||||
static Logger* CreateFileLogger(std::string strFilePath,
|
||||
LogLevel level = INFO, bool bPrintTimeStamp = true) {
|
||||
return new FileLogger(strFilePath, level, bPrintTimeStamp);
|
||||
}
|
||||
static Logger* CreateConsoleLogger(LogLevel level = INFO,
|
||||
bool bPrintTimeStamp = true) {
|
||||
return new ConsoleLogger(level, bPrintTimeStamp);
|
||||
}
|
||||
static Logger* CreateUdpLogger(char *szHost, unsigned uPort, LogLevel level = INFO,
|
||||
bool bPrintTimeStamp = true) {
|
||||
return new UdpLogger(szHost, uPort, level, bPrintTimeStamp);
|
||||
}
|
||||
private:
|
||||
LoggerFactory() {}
|
||||
public:
|
||||
static Logger *CreateFileLogger(std::string strFilePath,
|
||||
LogLevel level = INFO,
|
||||
bool bPrintTimeStamp = true) {
|
||||
return new FileLogger(strFilePath, level, bPrintTimeStamp);
|
||||
}
|
||||
static Logger *CreateConsoleLogger(LogLevel level = INFO,
|
||||
bool bPrintTimeStamp = true) {
|
||||
return new ConsoleLogger(level, bPrintTimeStamp);
|
||||
}
|
||||
static Logger *CreateUdpLogger(char *szHost, unsigned uPort,
|
||||
LogLevel level = INFO,
|
||||
bool bPrintTimeStamp = true) {
|
||||
return new UdpLogger(szHost, uPort, level, bPrintTimeStamp);
|
||||
}
|
||||
|
||||
class FileLogger : public Logger {
|
||||
public:
|
||||
FileLogger(std::string strFilePath, LogLevel level, bool bPrintTimeStamp)
|
||||
private:
|
||||
LoggerFactory() {}
|
||||
|
||||
class FileLogger : public Logger {
|
||||
public:
|
||||
FileLogger(std::string strFilePath, LogLevel level, bool bPrintTimeStamp)
|
||||
: Logger(level, bPrintTimeStamp) {
|
||||
pFileOut = new std::ofstream();
|
||||
pFileOut->open(strFilePath.c_str());
|
||||
}
|
||||
~FileLogger() {
|
||||
pFileOut->close();
|
||||
}
|
||||
std::ostream& GetStream() {
|
||||
return *pFileOut;
|
||||
}
|
||||
private:
|
||||
std::ofstream *pFileOut;
|
||||
};
|
||||
pFileOut = new std::ofstream();
|
||||
pFileOut->open(strFilePath.c_str());
|
||||
}
|
||||
~FileLogger() { pFileOut->close(); }
|
||||
std::ostream &GetStream() { return *pFileOut; }
|
||||
|
||||
class ConsoleLogger : public Logger {
|
||||
public:
|
||||
ConsoleLogger(LogLevel level, bool bPrintTimeStamp)
|
||||
private:
|
||||
std::ofstream *pFileOut;
|
||||
};
|
||||
|
||||
class ConsoleLogger : public Logger {
|
||||
public:
|
||||
ConsoleLogger(LogLevel level, bool bPrintTimeStamp)
|
||||
: Logger(level, bPrintTimeStamp) {}
|
||||
std::ostream& GetStream() {
|
||||
return std::cout;
|
||||
std::ostream &GetStream() { return std::cout; }
|
||||
};
|
||||
|
||||
class UdpLogger : public Logger {
|
||||
private:
|
||||
class UdpOstream : public std::ostream {
|
||||
public:
|
||||
UdpOstream(char *szHost, unsigned short uPort)
|
||||
: std::ostream(&sb), socket(INVALID_SOCKET) {
|
||||
#ifdef _WIN32
|
||||
WSADATA w;
|
||||
if (WSAStartup(0x0101, &w) != 0) {
|
||||
fprintf(stderr, "WSAStartup() failed.\n");
|
||||
return;
|
||||
}
|
||||
#endif
|
||||
socket = ::socket(AF_INET, SOCK_DGRAM, 0);
|
||||
if (socket == INVALID_SOCKET) {
|
||||
#ifdef _WIN32
|
||||
WSACleanup();
|
||||
#endif
|
||||
fprintf(stderr, "socket() failed.\n");
|
||||
return;
|
||||
}
|
||||
#ifdef _WIN32
|
||||
unsigned int b1, b2, b3, b4;
|
||||
sscanf(szHost, "%u.%u.%u.%u", &b1, &b2, &b3, &b4);
|
||||
struct in_addr addr = {(unsigned char)b1, (unsigned char)b2,
|
||||
(unsigned char)b3, (unsigned char)b4};
|
||||
#else
|
||||
struct in_addr addr = {inet_addr(szHost)};
|
||||
#endif
|
||||
struct sockaddr_in s = {AF_INET, htons(uPort), addr};
|
||||
server = s;
|
||||
}
|
||||
~UdpOstream() throw() {
|
||||
if (socket == INVALID_SOCKET) {
|
||||
return;
|
||||
}
|
||||
#ifdef _WIN32
|
||||
closesocket(socket);
|
||||
WSACleanup();
|
||||
#else
|
||||
close(socket);
|
||||
#endif
|
||||
}
|
||||
void Flush() {
|
||||
if (sendto(socket, sb.str().c_str(), (int)sb.str().length() + 1, 0,
|
||||
(struct sockaddr *)&server,
|
||||
(int)sizeof(sockaddr_in)) == -1) {
|
||||
fprintf(stderr, "sendto() failed.\n");
|
||||
}
|
||||
sb.str("");
|
||||
}
|
||||
|
||||
private:
|
||||
std::stringbuf sb;
|
||||
SOCKET socket;
|
||||
struct sockaddr_in server;
|
||||
};
|
||||
|
||||
class UdpLogger : public Logger {
|
||||
private:
|
||||
class UdpOstream : public std::ostream {
|
||||
public:
|
||||
UdpOstream(char *szHost, unsigned short uPort) : std::ostream(&sb), socket(INVALID_SOCKET){
|
||||
#ifdef _WIN32
|
||||
WSADATA w;
|
||||
if (WSAStartup(0x0101, &w) != 0) {
|
||||
fprintf(stderr, "WSAStartup() failed.\n");
|
||||
return;
|
||||
}
|
||||
#endif
|
||||
socket = ::socket(AF_INET, SOCK_DGRAM, 0);
|
||||
if (socket == INVALID_SOCKET) {
|
||||
#ifdef _WIN32
|
||||
WSACleanup();
|
||||
#endif
|
||||
fprintf(stderr, "socket() failed.\n");
|
||||
return;
|
||||
}
|
||||
#ifdef _WIN32
|
||||
unsigned int b1, b2, b3, b4;
|
||||
sscanf(szHost, "%u.%u.%u.%u", &b1, &b2, &b3, &b4);
|
||||
struct in_addr addr = {(unsigned char)b1, (unsigned char)b2, (unsigned char)b3, (unsigned char)b4};
|
||||
#else
|
||||
struct in_addr addr = {inet_addr(szHost)};
|
||||
#endif
|
||||
struct sockaddr_in s = {AF_INET, htons(uPort), addr};
|
||||
server = s;
|
||||
}
|
||||
~UdpOstream() throw() {
|
||||
if (socket == INVALID_SOCKET) {
|
||||
return;
|
||||
}
|
||||
#ifdef _WIN32
|
||||
closesocket(socket);
|
||||
WSACleanup();
|
||||
#else
|
||||
close(socket);
|
||||
#endif
|
||||
}
|
||||
void Flush() {
|
||||
if (sendto(socket, sb.str().c_str(), (int)sb.str().length() + 1,
|
||||
0, (struct sockaddr *)&server, (int)sizeof(sockaddr_in)) == -1) {
|
||||
fprintf(stderr, "sendto() failed.\n");
|
||||
}
|
||||
sb.str("");
|
||||
}
|
||||
public:
|
||||
UdpLogger(char *szHost, unsigned uPort, LogLevel level,
|
||||
bool bPrintTimeStamp)
|
||||
: Logger(level, bPrintTimeStamp),
|
||||
udpOut(szHost, (unsigned short)uPort) {}
|
||||
UdpOstream &GetStream() { return udpOut; }
|
||||
virtual void FlushStream() { udpOut.Flush(); }
|
||||
|
||||
private:
|
||||
std::stringbuf sb;
|
||||
SOCKET socket;
|
||||
struct sockaddr_in server;
|
||||
};
|
||||
public:
|
||||
UdpLogger(char *szHost, unsigned uPort, LogLevel level, bool bPrintTimeStamp)
|
||||
: Logger(level, bPrintTimeStamp), udpOut(szHost, (unsigned short)uPort) {}
|
||||
UdpOstream& GetStream() {
|
||||
return udpOut;
|
||||
}
|
||||
virtual void FlushStream() {
|
||||
udpOut.Flush();
|
||||
}
|
||||
private:
|
||||
UdpOstream udpOut;
|
||||
};
|
||||
private:
|
||||
UdpOstream udpOut;
|
||||
};
|
||||
};
|
||||
|
||||
class LogTransaction {
|
||||
public:
|
||||
LogTransaction(Logger *pLogger, LogLevel level, const char *szFile, const int nLine, const char *szFunc) : pLogger(pLogger), level(level) {
|
||||
if (!pLogger) {
|
||||
std::cout << "[-----] ";
|
||||
return;
|
||||
}
|
||||
if (!pLogger->ShouldLogFor(level)) {
|
||||
return;
|
||||
}
|
||||
pLogger->EnterCriticalSection();
|
||||
pLogger->GetStream() << pLogger->GetLead(level, szFile, nLine, szFunc);
|
||||
public:
|
||||
LogTransaction(Logger *pLogger, LogLevel level, const char *szFile,
|
||||
const int nLine, const char *szFunc)
|
||||
: pLogger(pLogger), level(level) {
|
||||
if (!pLogger) {
|
||||
std::cout << "[-----] ";
|
||||
return;
|
||||
}
|
||||
~LogTransaction() {
|
||||
if (!pLogger) {
|
||||
std::cout << std::endl;
|
||||
return;
|
||||
}
|
||||
if (!pLogger->ShouldLogFor(level)) {
|
||||
return;
|
||||
}
|
||||
pLogger->GetStream() << std::endl;
|
||||
pLogger->FlushStream();
|
||||
pLogger->LeaveCriticalSection();
|
||||
if (level == FATAL) {
|
||||
exit(1);
|
||||
}
|
||||
if (!pLogger->ShouldLogFor(level)) {
|
||||
return;
|
||||
}
|
||||
std::ostream& GetStream() {
|
||||
if (!pLogger) {
|
||||
return std::cout;
|
||||
}
|
||||
if (!pLogger->ShouldLogFor(level)) {
|
||||
return ossNull;
|
||||
}
|
||||
return pLogger->GetStream();
|
||||
pLogger->EnterCriticalSection();
|
||||
pLogger->GetStream() << pLogger->GetLead(level, szFile, nLine, szFunc);
|
||||
}
|
||||
~LogTransaction() {
|
||||
if (!pLogger) {
|
||||
std::cout << std::endl;
|
||||
return;
|
||||
}
|
||||
private:
|
||||
Logger *pLogger;
|
||||
LogLevel level;
|
||||
std::ostringstream ossNull;
|
||||
if (!pLogger->ShouldLogFor(level)) {
|
||||
return;
|
||||
}
|
||||
pLogger->GetStream() << std::endl;
|
||||
pLogger->FlushStream();
|
||||
pLogger->LeaveCriticalSection();
|
||||
if (level == FATAL) {
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
std::ostream &GetStream() {
|
||||
if (!pLogger) {
|
||||
return std::cout;
|
||||
}
|
||||
if (!pLogger->ShouldLogFor(level)) {
|
||||
return ossNull;
|
||||
}
|
||||
return pLogger->GetStream();
|
||||
}
|
||||
|
||||
private:
|
||||
Logger *pLogger;
|
||||
LogLevel level;
|
||||
std::ostringstream ossNull;
|
||||
};
|
||||
|
||||
}
|
||||
} // namespace simplelogger
|
||||
|
||||
extern simplelogger::Logger *logger;
|
||||
#define LOG(level) simplelogger::LogTransaction(logger, level, __FILE__, __LINE__, __FUNCTION__).GetStream()
|
||||
#define LOG(level) \
|
||||
simplelogger::LogTransaction(logger, level, __FILE__, __LINE__, \
|
||||
__FUNCTION__) \
|
||||
.GetStream()
|
||||
|
||||
@@ -24,6 +24,8 @@
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
#pragma warning(push)
#pragma warning(disable : 4244)

#include "NvDecoder.h"

@@ -222,7 +224,7 @@ int NvDecoder::HandleVideoSequence(CUVIDEOFORMAT *pVideoFormat) {
if (!decodecaps.bIsSupported) {
NVDEC_THROW_ERROR("Codec not supported on this GPU",
CUDA_ERROR_NOT_SUPPORTED);
return nDecodeSurface;
// return nDecodeSurface;
}

if ((pVideoFormat->coded_width > decodecaps.nMaxWidth) ||

@@ -237,7 +239,7 @@ int NvDecoder::HandleVideoSequence(CUVIDEOFORMAT *pVideoFormat) {

const std::string cErr = errorString.str();
NVDEC_THROW_ERROR(cErr, CUDA_ERROR_NOT_SUPPORTED);
return nDecodeSurface;
// return nDecodeSurface;
}

if ((pVideoFormat->coded_width >> 4) * (pVideoFormat->coded_height >> 4) >

@@ -254,7 +256,7 @@ int NvDecoder::HandleVideoSequence(CUVIDEOFORMAT *pVideoFormat) {

const std::string cErr = errorString.str();
NVDEC_THROW_ERROR(cErr, CUDA_ERROR_NOT_SUPPORTED);
return nDecodeSurface;
// return nDecodeSurface;
}

if (m_nWidth && m_nLumaHeight && m_nChromaHeight) {

@@ -571,7 +573,7 @@ int NvDecoder::setReconfigParams(const Rect *pCropRect, const Dim *pResizeDim) {
int NvDecoder::HandlePictureDecode(CUVIDPICPARAMS *pPicParams) {
if (!m_hDecoder) {
NVDEC_THROW_ERROR("Decoder not initialized.", CUDA_ERROR_NOT_INITIALIZED);
return false;
// return false;
}
m_nPicNumInDecodeOrder[pPicParams->CurrPicIdx] = m_nDecodePicCnt++;
CUDA_DRVAPI_CALL(cuCtxPushCurrent(m_cuContext));

@@ -921,3 +923,4 @@ void NvDecoder::UnlockFrame(uint8_t **pFrame) {
uint64_t timestamp[2] = {0};
m_vTimestamp.insert(m_vTimestamp.end(), &timestamp[0], &timestamp[1]);
}
#pragma warning(pop)
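Aside (not part of this commit): NVDEC_THROW_ERROR throws, so the return statements that followed it were unreachable and are commented out above; the file is additionally wrapped in #pragma warning(push/pop) to silence MSVC's C4244 conversion warnings. An illustrative sketch of the unreachable-return pattern, assuming a throwing error helper:

    #include <stdexcept>

    [[noreturn]] inline void throw_error(const char *msg) {
      throw std::runtime_error(msg);
    }

    int surfaces_for(bool supported, int n) {
      if (!supported) {
        throw_error("codec not supported on this GPU");
        // return n;  // never reached once the helper throws
      }
      return n;
    }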
@@ -769,12 +769,11 @@ uint32_t NvEncoder::GetWidthInBytes(const NV_ENC_BUFFER_FORMAT bufferFormat,
return width * 4;
default:
NVENC_THROW_ERROR("Invalid Buffer format", NV_ENC_ERR_INVALID_PARAM);
return 0;
// return 0;
}
}

uint32_t NvEncoder::GetNumChromaPlanes(
const NV_ENC_BUFFER_FORMAT bufferFormat) {
int32_t NvEncoder::GetNumChromaPlanes(const NV_ENC_BUFFER_FORMAT bufferFormat) {
switch (bufferFormat) {
case NV_ENC_BUFFER_FORMAT_NV12:
case NV_ENC_BUFFER_FORMAT_YUV420_10BIT:

@@ -792,12 +791,12 @@ uint32_t NvEncoder::GetNumChromaPlanes(
return 0;
default:
NVENC_THROW_ERROR("Invalid Buffer format", NV_ENC_ERR_INVALID_PARAM);
return -1;
// return -1;
}
}

uint32_t NvEncoder::GetChromaPitch(const NV_ENC_BUFFER_FORMAT bufferFormat,
const uint32_t lumaPitch) {
int32_t NvEncoder::GetChromaPitch(const NV_ENC_BUFFER_FORMAT bufferFormat,
const uint32_t lumaPitch) {
switch (bufferFormat) {
case NV_ENC_BUFFER_FORMAT_NV12:
case NV_ENC_BUFFER_FORMAT_YUV420_10BIT:

@@ -815,7 +814,7 @@ uint32_t NvEncoder::GetChromaPitch(const NV_ENC_BUFFER_FORMAT bufferFormat,
return 0;
default:
NVENC_THROW_ERROR("Invalid Buffer format", NV_ENC_ERR_INVALID_PARAM);
return -1;
// return -1;
}
}

@@ -871,7 +870,7 @@ uint32_t NvEncoder::GetChromaHeight(const NV_ENC_BUFFER_FORMAT bufferFormat,
return 0;
default:
NVENC_THROW_ERROR("Invalid Buffer format", NV_ENC_ERR_INVALID_PARAM);
return 0;
// return 0;
}
}

@@ -897,7 +896,7 @@ uint32_t NvEncoder::GetChromaWidthInBytes(
return 0;
default:
NVENC_THROW_ERROR("Invalid Buffer format", NV_ENC_ERR_INVALID_PARAM);
return 0;
// return 0;
}
}

@@ -934,7 +933,7 @@ int NvEncoder::GetFrameSize() const {
return 4 * GetEncodeWidth() * GetEncodeHeight();
default:
NVENC_THROW_ERROR("Invalid Buffer format", NV_ENC_ERR_INVALID_PARAM);
return 0;
// return 0;
}
}

@@ -317,14 +317,14 @@ class NvEncoder {
* @brief This a static function to get the chroma plane pitch for YUV planar
* formats.
*/
static uint32_t GetChromaPitch(const NV_ENC_BUFFER_FORMAT bufferFormat,
const uint32_t lumaPitch);
static int32_t GetChromaPitch(const NV_ENC_BUFFER_FORMAT bufferFormat,
const uint32_t lumaPitch);

/**
* @brief This a static function to get the number of chroma planes for YUV
* planar formats.
*/
static uint32_t GetNumChromaPlanes(const NV_ENC_BUFFER_FORMAT bufferFormat);
static int32_t GetNumChromaPlanes(const NV_ENC_BUFFER_FORMAT bufferFormat);

/**
* @brief This a static function to get the chroma plane width in bytes for
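Aside (not part of this commit): GetNumChromaPlanes and GetChromaPitch previously returned uint32_t yet used return -1 on the invalid-format path, so the sentinel wrapped around; switching the return type to int32_t, as in the hunks above, keeps -1 representable. A small sketch of the idea with a hypothetical format query:

    #include <cstdint>

    // With an unsigned return type, "return -1" becomes 0xFFFFFFFF and is
    // indistinguishable from a (nonsensical but positive) plane count.
    int32_t chroma_planes(int format) {
      switch (format) {
        case 0:  return 1;   // semi-planar layout, e.g. NV12
        case 1:  return 2;   // fully planar 4:2:0 layout
        default: return -1;  // unknown format
      }
    }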
File diff suppressed because it is too large
@@ -2,23 +2,27 @@

#include "log.h"

#define SAVE_RECEIVED_AV1_STREAM 0
#define SAVE_DECODED_NV12_STREAM 0
// #define SAVE_DECODED_NV12_STREAM
// #define SAVE_RECEIVED_AV1_STREAM

AomAv1Decoder::AomAv1Decoder() {}

AomAv1Decoder::~AomAv1Decoder() {
if (SAVE_RECEIVED_AV1_STREAM && file_av1_) {
fflush(file_av1_);
fclose(file_av1_);
file_av1_ = nullptr;
}

if (SAVE_DECODED_NV12_STREAM && file_nv12_) {
#ifdef SAVE_DECODED_NV12_STREAM
if (file_nv12_) {
fflush(file_nv12_);
fclose(file_nv12_);
file_nv12_ = nullptr;
}
#endif

#ifdef SAVE_RECEIVED_AV1_STREAM
if (file_av1_) {
fflush(file_av1_);
fclose(file_av1_);
file_av1_ = nullptr;
}
#endif

if (nv12_frame_) {
delete nv12_frame_;

@@ -43,29 +47,29 @@ int AomAv1Decoder::Init() {
aom_codec_control(&aom_av1_decoder_ctx_, AV1D_GET_IMG_FORMAT,
AOM_IMG_FMT_NV12);

if (SAVE_RECEIVED_AV1_STREAM) {
file_av1_ = fopen("received_av1_stream.ivf", "w+b");
if (!file_av1_) {
LOG_WARN("Fail to open received_av1_stream.ivf");
}
#ifdef SAVE_DECODED_NV12_STREAM
file_nv12_ = fopen("decoded_nv12_stream.yuv", "w+b");
if (!file_nv12_) {
LOG_WARN("Fail to open decoded_nv12_stream.yuv");
}
#endif

if (SAVE_DECODED_NV12_STREAM) {
file_nv12_ = fopen("decoded_nv12_stream.yuv", "w+b");
if (!file_nv12_) {
LOG_WARN("Fail to open decoded_nv12_stream.yuv");
}
#ifdef SAVE_RECEIVED_AV1_STREAM
file_av1_ = fopen("received_av1_stream.ivf", "w+b");
if (!file_av1_) {
LOG_WARN("Fail to open received_av1_stream.ivf");
}
#endif

return 0;
}

int AomAv1Decoder::Decode(
const uint8_t *data, int size,
const uint8_t *data, size_t size,
std::function<void(VideoFrame)> on_receive_decoded_frame) {
if (SAVE_RECEIVED_AV1_STREAM) {
fwrite((unsigned char *)data, 1, size, file_av1_);
}
#ifdef SAVE_RECEIVED_AV1_STREAM
fwrite((unsigned char *)data, 1, size, file_av1_);
#endif

aom_codec_iter_t iter = nullptr;
aom_codec_err_t ret =

@@ -105,8 +109,8 @@ int AomAv1Decoder::Decode(
}
}
int corrupted = 0;
int ret = aom_codec_control(&aom_av1_decoder_ctx_, AOMD_GET_FRAME_CORRUPTED,
&corrupted);
ret = aom_codec_control(&aom_av1_decoder_ctx_, AOMD_GET_FRAME_CORRUPTED,
&corrupted);
if (ret != AOM_CODEC_OK) {
LOG_ERROR("Failed to get frame corrupted");
return -1;

@@ -140,10 +144,10 @@ int AomAv1Decoder::Decode(

on_receive_decoded_frame(*nv12_frame_);

if (SAVE_DECODED_NV12_STREAM) {
fwrite((unsigned char *)nv12_frame_->Buffer(), 1, nv12_frame_->Size(),
file_nv12_);
}
#ifdef SAVE_DECODED_NV12_STREAM
fwrite((unsigned char *)nv12_frame_->Buffer(), 1, nv12_frame_->Size(),
file_nv12_);
#endif

return 0;
}

@@ -22,7 +22,7 @@ class AomAv1Decoder : public VideoDecoder {
public:
int Init();

int Decode(const uint8_t *data, int size,
int Decode(const uint8_t *data, size_t size,
std::function<void(VideoFrame)> on_receive_decoded_frame);

std::string GetDecoderName() { return "AomAv1"; }

@@ -32,8 +32,8 @@ class AomAv1Decoder : public VideoDecoder {
int nv12_frame_capacity_ = 0;
int nv12_frame_size_ = 0;

int frame_width_ = 0;
int frame_height_ = 0;
uint32_t frame_width_ = 0;
uint32_t frame_height_ = 0;

FILE *file_av1_ = nullptr;
FILE *file_nv12_ = nullptr;
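Aside (not part of this commit): the decoder and encoder files switch their debug stream dumps from a "#define SAVE_X 0" constant with runtime "if (SAVE_X)" blocks to a commented-out "#define SAVE_X" with "#ifdef SAVE_X" blocks, so the dump code and the otherwise-unused FILE* handles are compiled only when the dump is explicitly enabled. A minimal sketch of the new pattern with hypothetical names:

    #include <cstddef>
    #include <cstdio>

    // #define SAVE_DUMP   // uncomment to write the raw stream to disk

    void consume(const unsigned char *data, size_t size) {
    #ifdef SAVE_DUMP
      static FILE *dump = fopen("dump.bin", "w+b");
      if (dump) fwrite(data, 1, size, dump);   // only built when enabled
    #endif
      (void)data;   // placeholders for the real decode path
      (void)size;
    }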
@@ -2,8 +2,8 @@
|
||||
|
||||
#include "log.h"
|
||||
|
||||
#define SAVE_RECEIVED_AV1_STREAM 0
|
||||
#define SAVE_DECODED_NV12_STREAM 0
|
||||
// #define SAVE_DECODED_NV12_STREAM
|
||||
// #define SAVE_RECEIVED_AV1_STREAM
|
||||
|
||||
#include "libyuv.h"
|
||||
|
||||
@@ -28,7 +28,8 @@ class ScopedDav1dData {
|
||||
};
|
||||
|
||||
// Calling `dav1d_data_wrap` requires a `free_callback` to be registered.
|
||||
void NullFreeCallback(const uint8_t *buffer, void *opaque) {}
|
||||
void NullFreeCallback([[maybe_unused]] const uint8_t *buffer,
|
||||
[[maybe_unused]] void *opaque) {}
|
||||
|
||||
void Yuv420pToNv12(unsigned char *SrcY, unsigned char *SrcU,
|
||||
unsigned char *SrcV, int y_stride, int uv_stride,
|
||||
@@ -49,17 +50,21 @@ void Yuv420pToNv12(unsigned char *SrcY, unsigned char *SrcU,
|
||||
Dav1dAv1Decoder::Dav1dAv1Decoder() {}
|
||||
|
||||
Dav1dAv1Decoder::~Dav1dAv1Decoder() {
|
||||
if (SAVE_RECEIVED_AV1_STREAM && file_av1_) {
|
||||
fflush(file_av1_);
|
||||
fclose(file_av1_);
|
||||
file_av1_ = nullptr;
|
||||
}
|
||||
|
||||
if (SAVE_DECODED_NV12_STREAM && file_nv12_) {
|
||||
#ifdef SAVE_DECODED_NV12_STREAM
|
||||
if (file_nv12_) {
|
||||
fflush(file_nv12_);
|
||||
fclose(file_nv12_);
|
||||
file_nv12_ = nullptr;
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef SAVE_RECEIVED_AV1_STREAM
|
||||
if (file_av1_) {
|
||||
fflush(file_av1_);
|
||||
fclose(file_av1_);
|
||||
file_av1_ = nullptr;
|
||||
}
|
||||
#endif
|
||||
|
||||
if (nv12_frame_) {
|
||||
delete nv12_frame_;
|
||||
@@ -83,29 +88,29 @@ int Dav1dAv1Decoder::Init() {
|
||||
LOG_ERROR("Dav1d AV1 decoder open failed");
|
||||
}
|
||||
|
||||
if (SAVE_RECEIVED_AV1_STREAM) {
|
||||
file_av1_ = fopen("received_av1_stream.ivf", "w+b");
|
||||
if (!file_av1_) {
|
||||
LOG_WARN("Fail to open received_av1_stream.ivf");
|
||||
}
|
||||
#ifdef SAVE_DECODED_NV12_STREAM
|
||||
file_nv12_ = fopen("decoded_nv12_stream.yuv", "w+b");
|
||||
if (!file_nv12_) {
|
||||
LOG_WARN("Fail to open decoded_nv12_stream.yuv");
|
||||
}
|
||||
#endif
|
||||
|
||||
if (SAVE_DECODED_NV12_STREAM) {
|
||||
file_nv12_ = fopen("decoded_nv12_stream.yuv", "w+b");
|
||||
if (!file_nv12_) {
|
||||
LOG_WARN("Fail to open decoded_nv12_stream.yuv");
|
||||
}
|
||||
#ifdef SAVE_RECEIVED_AV1_STREAM
|
||||
file_av1_ = fopen("received_av1_stream.ivf", "w+b");
|
||||
if (!file_av1_) {
|
||||
LOG_WARN("Fail to open received_av1_stream.ivf");
|
||||
}
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
int Dav1dAv1Decoder::Decode(
|
||||
const uint8_t *data, int size,
|
||||
const uint8_t *data, size_t size,
|
||||
std::function<void(VideoFrame)> on_receive_decoded_frame) {
|
||||
if (SAVE_RECEIVED_AV1_STREAM) {
|
||||
fwrite((unsigned char *)data, 1, size, file_av1_);
|
||||
}
|
||||
#ifdef SAVE_RECEIVED_AV1_STREAM
|
||||
fwrite((unsigned char *)data, 1, size, file_av1_);
|
||||
#endif
|
||||
|
||||
ScopedDav1dData scoped_dav1d_data;
|
||||
Dav1dData &dav1d_data = scoped_dav1d_data.Data();
|
||||
@@ -176,14 +181,14 @@ int Dav1dAv1Decoder::Decode(
|
||||
Yuv420pToNv12((unsigned char *)dav1d_picture.data[0],
|
||||
(unsigned char *)dav1d_picture.data[1],
|
||||
(unsigned char *)dav1d_picture.data[2],
|
||||
dav1d_picture.stride[0], dav1d_picture.stride[1],
|
||||
(int)dav1d_picture.stride[0], (int)dav1d_picture.stride[1],
|
||||
(unsigned char *)nv12_frame_->Buffer(), frame_width_,
|
||||
frame_height_);
|
||||
} else {
|
||||
libyuv::I420ToNV12(
|
||||
(const uint8_t *)dav1d_picture.data[0], dav1d_picture.stride[0],
|
||||
(const uint8_t *)dav1d_picture.data[1], dav1d_picture.stride[1],
|
||||
(const uint8_t *)dav1d_picture.data[2], dav1d_picture.stride[1],
|
||||
(const uint8_t *)dav1d_picture.data[0], (int)dav1d_picture.stride[0],
|
||||
(const uint8_t *)dav1d_picture.data[1], (int)dav1d_picture.stride[1],
|
||||
(const uint8_t *)dav1d_picture.data[2], (int)dav1d_picture.stride[1],
|
||||
(uint8_t *)nv12_frame_->Buffer(), frame_width_,
|
||||
(uint8_t *)nv12_frame_->Buffer() + frame_width_ * frame_height_,
|
||||
frame_width_, frame_width_, frame_height_);
|
||||
@@ -191,10 +196,10 @@ int Dav1dAv1Decoder::Decode(
|
||||
|
||||
on_receive_decoded_frame(*nv12_frame_);
|
||||
|
||||
if (SAVE_DECODED_NV12_STREAM) {
|
||||
fwrite((unsigned char *)nv12_frame_->Buffer(), 1, nv12_frame_->Size(),
|
||||
file_nv12_);
|
||||
}
|
||||
#ifdef SAVE_DECODED_NV12_STREAM
|
||||
fwrite((unsigned char *)nv12_frame_->Buffer(), 1, nv12_frame_->Size(),
|
||||
file_nv12_);
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
|
||||
@@ -20,18 +20,18 @@ class Dav1dAv1Decoder : public VideoDecoder {
|
||||
public:
|
||||
int Init();
|
||||
|
||||
int Decode(const uint8_t *data, int size,
|
||||
int Decode(const uint8_t *data, size_t size,
|
||||
std::function<void(VideoFrame)> on_receive_decoded_frame);
|
||||
|
||||
std::string GetDecoderName() { return "Dav1dAv1"; }
|
||||
|
||||
private:
|
||||
VideoFrame *nv12_frame_ = 0;
|
||||
int nv12_frame_capacity_ = 0;
|
||||
int nv12_frame_size_ = 0;
|
||||
size_t nv12_frame_capacity_ = 0;
|
||||
size_t nv12_frame_size_ = 0;
|
||||
|
||||
int frame_width_ = 0;
|
||||
int frame_height_ = 0;
|
||||
uint32_t frame_width_ = 0;
|
||||
uint32_t frame_height_ = 0;
|
||||
|
||||
FILE *file_av1_ = nullptr;
|
||||
FILE *file_nv12_ = nullptr;
|
||||
|
||||
@@ -3,22 +3,26 @@
|
||||
#include "log.h"
|
||||
#include "nvcodec_api.h"
|
||||
|
||||
#define SAVE_RECEIVED_H264_STREAM 0
|
||||
#define SAVE_DECODED_NV12_STREAM 0
|
||||
// #define SAVE_DECODED_NV12_STREAM
|
||||
// #define SAVE_RECEIVED_H264_STREAM
|
||||
|
||||
NvidiaVideoDecoder::NvidiaVideoDecoder() {}
|
||||
NvidiaVideoDecoder::~NvidiaVideoDecoder() {
|
||||
if (SAVE_RECEIVED_H264_STREAM && file_h264_) {
|
||||
fflush(file_h264_);
|
||||
fclose(file_h264_);
|
||||
file_h264_ = nullptr;
|
||||
}
|
||||
|
||||
if (SAVE_DECODED_NV12_STREAM && file_nv12_) {
|
||||
#ifdef SAVE_DECODED_NV12_STREAM
|
||||
if (file_nv12_) {
|
||||
fflush(file_nv12_);
|
||||
fclose(file_nv12_);
|
||||
file_nv12_ = nullptr;
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef SAVE_RECEIVED_H264_STREAM
|
||||
if (file_h264_) {
|
||||
fflush(file_h264_);
|
||||
fclose(file_h264_);
|
||||
file_h264_ = nullptr;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
int NvidiaVideoDecoder::Init() {
|
||||
@@ -42,55 +46,55 @@ int NvidiaVideoDecoder::Init() {
|
||||
|
||||
decoder = new NvDecoder(cuContext, false, cudaVideoCodec_H264, true);
|
||||
|
||||
if (SAVE_RECEIVED_H264_STREAM) {
|
||||
file_h264_ = fopen("received_h264_stream.h264", "w+b");
|
||||
if (!file_h264_) {
|
||||
LOG_WARN("Fail to open received_h264_stream.h264");
|
||||
}
|
||||
#ifdef SAVE_DECODED_NV12_STREAM
|
||||
file_nv12_ = fopen("decoded_nv12_stream.yuv", "w+b");
|
||||
if (!file_nv12_) {
|
||||
LOG_WARN("Fail to open decoded_nv12_stream.yuv");
|
||||
}
|
||||
#endif
|
||||
|
||||
if (SAVE_DECODED_NV12_STREAM) {
|
||||
file_nv12_ = fopen("decoded_nv12_stream.yuv", "w+b");
|
||||
if (!file_nv12_) {
|
||||
LOG_WARN("Fail to open decoded_nv12_stream.yuv");
|
||||
}
|
||||
#ifdef SAVE_RECEIVED_H264_STREAM
|
||||
file_h264_ = fopen("received_h264_stream.h264", "w+b");
|
||||
if (!file_h264_) {
|
||||
LOG_WARN("Fail to open received_h264_stream.h264");
|
||||
}
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
int NvidiaVideoDecoder::Decode(
|
||||
const uint8_t *data, int size,
|
||||
const uint8_t *data, size_t size,
|
||||
std::function<void(VideoFrame)> on_receive_decoded_frame) {
|
||||
if (!decoder) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (SAVE_RECEIVED_H264_STREAM) {
|
||||
fwrite((unsigned char *)data, 1, size, file_h264_);
|
||||
}
|
||||
#ifdef SAVE_RECEIVED_H264_STREAM
|
||||
fwrite((unsigned char *)data, 1, size, file_h264_);
|
||||
#endif
|
||||
|
||||
if ((*(data + 4) & 0x1f) == 0x07) {
|
||||
// LOG_WARN("Receive key frame");
|
||||
}
|
||||
|
||||
int num_frame_returned = decoder->Decode(data, size);
|
||||
|
||||
int num_frame_returned = decoder->Decode(data, (int)size);
|
||||
for (size_t i = 0; i < num_frame_returned; ++i) {
|
||||
cudaVideoSurfaceFormat format = decoder->GetOutputFormat();
|
||||
if (format == cudaVideoSurfaceFormat_NV12) {
|
||||
uint8_t *data = nullptr;
|
||||
data = decoder->GetFrame();
|
||||
if (data) {
|
||||
uint8_t *decoded_frame_buffer = nullptr;
|
||||
decoded_frame_buffer = decoder->GetFrame();
|
||||
if (decoded_frame_buffer) {
|
||||
if (on_receive_decoded_frame) {
|
||||
VideoFrame decoded_frame(
|
||||
data, decoder->GetWidth() * decoder->GetHeight() * 3 / 2,
|
||||
decoded_frame_buffer,
|
||||
decoder->GetWidth() * decoder->GetHeight() * 3 / 2,
|
||||
decoder->GetWidth(), decoder->GetHeight());
|
||||
on_receive_decoded_frame(decoded_frame);
|
||||
if (SAVE_DECODED_NV12_STREAM) {
|
||||
fwrite((unsigned char *)decoded_frame.Buffer(), 1,
|
||||
decoded_frame.Size(), file_nv12_);
|
||||
}
|
||||
#ifdef SAVE_DECODED_NV12_STREAM
|
||||
fwrite((unsigned char *)decoded_frame.Buffer(), 1,
|
||||
decoded_frame.Size(), file_nv12_);
|
||||
#endif
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ class NvidiaVideoDecoder : public VideoDecoder {
|
||||
public:
|
||||
int Init();
|
||||
|
||||
int Decode(const uint8_t* data, int size,
|
||||
int Decode(const uint8_t* data, size_t size,
|
||||
std::function<void(VideoFrame)> on_receive_decoded_frame);
|
||||
|
||||
std::string GetDecoderName() { return "NvidiaH264"; }
|
||||
|
||||
@@ -5,8 +5,8 @@
|
||||
#include "libyuv.h"
|
||||
#include "log.h"
|
||||
|
||||
#define SAVE_NV12_STREAM 0
|
||||
#define SAVE_H264_STREAM 0
|
||||
// #define SAVE_DECODED_NV12_STREAM
|
||||
// #define SAVE_RECEIVED_H264_STREAM
|
||||
|
||||
void CopyYuvWithStride(uint8_t *src_y, uint8_t *src_u, uint8_t *src_v,
|
||||
int width, int height, int stride_y, int stride_u,
|
||||
@@ -65,31 +65,35 @@ OpenH264Decoder::~OpenH264Decoder() {
|
||||
delete[] yuv420p_frame_;
|
||||
}
|
||||
|
||||
if (SAVE_H264_STREAM && h264_stream_) {
|
||||
fflush(h264_stream_);
|
||||
h264_stream_ = nullptr;
|
||||
}
|
||||
|
||||
if (SAVE_NV12_STREAM && nv12_stream_) {
|
||||
#ifdef SAVE_DECODED_NV12_STREAM
|
||||
if (nv12_stream_) {
|
||||
fflush(nv12_stream_);
|
||||
nv12_stream_ = nullptr;
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef SAVE_RECEIVED_H264_STREAM
|
||||
if (h264_stream_) {
|
||||
fflush(h264_stream_);
|
||||
h264_stream_ = nullptr;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
int OpenH264Decoder::Init() {
|
||||
if (SAVE_NV12_STREAM) {
|
||||
nv12_stream_ = fopen("nv12_receive_.yuv", "w+b");
|
||||
if (!nv12_stream_) {
|
||||
LOG_WARN("Fail to open nv12_receive_.yuv");
|
||||
}
|
||||
#ifdef SAVE_DECODED_NV12_STREAM
|
||||
nv12_stream_ = fopen("nv12_receive_.yuv", "w+b");
|
||||
if (!nv12_stream_) {
|
||||
LOG_WARN("Fail to open nv12_receive_.yuv");
|
||||
}
|
||||
#endif
|
||||
|
||||
if (SAVE_NV12_STREAM) {
|
||||
h264_stream_ = fopen("h264_receive.h264", "w+b");
|
||||
if (!h264_stream_) {
|
||||
LOG_WARN("Fail to open h264_receive.h264");
|
||||
}
|
||||
#ifdef SAVE_RECEIVED_H264_STREAM
|
||||
h264_stream_ = fopen("h264_receive.h264", "w+b");
|
||||
if (!h264_stream_) {
|
||||
LOG_WARN("Fail to open h264_receive.h264");
|
||||
}
|
||||
#endif
|
||||
|
||||
frame_width_ = 1280;
|
||||
frame_height_ = 720;
|
||||
@@ -115,15 +119,15 @@ int OpenH264Decoder::Init() {
|
||||
}
|
||||
|
||||
int OpenH264Decoder::Decode(
|
||||
const uint8_t *data, int size,
|
||||
const uint8_t *data, size_t size,
|
||||
std::function<void(VideoFrame)> on_receive_decoded_frame) {
|
||||
if (!openh264_decoder_) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (SAVE_H264_STREAM) {
|
||||
fwrite((unsigned char *)data, 1, size, h264_stream_);
|
||||
}
|
||||
#ifdef SAVE_RECEIVED_H264_STREAM
|
||||
fwrite((unsigned char *)data, 1, size, h264_stream_);
|
||||
#endif
|
||||
|
||||
if ((*(data + 4) & 0x1f) == 0x07) {
|
||||
// LOG_WARN("Receive key frame");
|
||||
@@ -132,7 +136,7 @@ int OpenH264Decoder::Decode(
|
||||
SBufferInfo sDstBufInfo;
|
||||
memset(&sDstBufInfo, 0, sizeof(SBufferInfo));
|
||||
|
||||
openh264_decoder_->DecodeFrameNoDelay(data, size, yuv420p_planes_,
|
||||
openh264_decoder_->DecodeFrameNoDelay(data, (int)size, yuv420p_planes_,
|
||||
&sDstBufInfo);
|
||||
|
||||
frame_width_ = sDstBufInfo.UsrData.sSystemBuffer.iWidth;
|
||||
@@ -200,10 +204,10 @@ int OpenH264Decoder::Decode(
|
||||
|
||||
on_receive_decoded_frame(*nv12_frame_);
|
||||
|
||||
if (SAVE_NV12_STREAM) {
|
||||
fwrite((unsigned char *)nv12_frame_->Buffer(), 1, nv12_frame_->Size(),
|
||||
nv12_stream_);
|
||||
}
|
||||
#ifdef SAVE_DECODED_NV12_STREAM
|
||||
fwrite((unsigned char *)nv12_frame_->Buffer(), 1, nv12_frame_->Size(),
|
||||
nv12_stream_);
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ class OpenH264Decoder : public VideoDecoder {
|
||||
public:
|
||||
int Init();
|
||||
|
||||
int Decode(const uint8_t* data, int size,
|
||||
int Decode(const uint8_t* data, size_t size,
|
||||
std::function<void(VideoFrame)> on_receive_decoded_frame);
|
||||
|
||||
std::string GetDecoderName() { return "OpenH264"; }
|
||||
@@ -37,8 +37,8 @@ class OpenH264Decoder : public VideoDecoder {
|
||||
FILE* h264_stream_ = nullptr;
|
||||
uint8_t* decoded_frame_ = nullptr;
|
||||
int decoded_frame_size_ = 0;
|
||||
int frame_width_ = 1280;
|
||||
int frame_height_ = 720;
|
||||
uint32_t frame_width_ = 1280;
|
||||
uint32_t frame_height_ = 720;
|
||||
|
||||
unsigned char* yuv420p_planes_[3] = {nullptr, nullptr, nullptr};
|
||||
unsigned char* yuv420p_frame_ = nullptr;
|
||||
|
||||
@@ -20,7 +20,7 @@ class VideoDecoder {
|
||||
virtual int Init() = 0;
|
||||
|
||||
virtual int Decode(
|
||||
const uint8_t *data, int size,
|
||||
const uint8_t *data, size_t size,
|
||||
std::function<void(VideoFrame)> on_receive_decoded_frame) = 0;
|
||||
|
||||
virtual std::string GetDecoderName() = 0;
|
||||
|
||||
@@ -5,8 +5,8 @@
|
||||
|
||||
#include "log.h"
|
||||
|
||||
#define SAVE_RECEIVED_NV12_STREAM 0
|
||||
#define SAVE_ENCODED_AV1_STREAM 0
|
||||
// #define SAVE_RECEIVED_NV12_STREAM
|
||||
// #define SAVE_ENCODED_AV1_STREAM
|
||||
|
||||
#define SET_ENCODER_PARAM_OR_RETURN_ERROR(param_id, param_value) \
|
||||
do { \
|
||||
@@ -104,17 +104,21 @@ int AomAv1Encoder::ResetEncodeResolution(unsigned int width,
|
||||
AomAv1Encoder::AomAv1Encoder() {}
|
||||
|
||||
AomAv1Encoder::~AomAv1Encoder() {
|
||||
if (SAVE_RECEIVED_NV12_STREAM && file_nv12_) {
|
||||
#ifdef SAVE_RECEIVED_NV12_STREAM
|
||||
if (file_nv12_) {
|
||||
fflush(file_nv12_);
|
||||
fclose(file_nv12_);
|
||||
file_nv12_ = nullptr;
|
||||
}
|
||||
#endif
|
||||
|
||||
if (SAVE_ENCODED_AV1_STREAM && file_av1_) {
|
||||
#ifdef SAVE_ENCODED_AV1_STREAM
|
||||
if (file_av1_) {
|
||||
fflush(file_av1_);
|
||||
fclose(file_av1_);
|
||||
file_av1_ = nullptr;
|
||||
}
|
||||
#endif
|
||||
|
||||
delete[] encoded_frame_;
|
||||
encoded_frame_ = nullptr;
|
||||
@@ -245,19 +249,19 @@ int AomAv1Encoder::Init() {
|
||||
frame_for_encode_ = aom_img_wrap(nullptr, AOM_IMG_FMT_NV12, frame_width_,
|
||||
frame_height_, 1, nullptr);
|
||||
|
||||
if (SAVE_RECEIVED_NV12_STREAM) {
|
||||
file_nv12_ = fopen("received_nv12_stream.yuv", "w+b");
|
||||
if (!file_nv12_) {
|
||||
LOG_ERROR("Fail to open received_nv12_stream.yuv");
|
||||
}
|
||||
#ifdef SAVE_RECEIVED_NV12_STREAM
|
||||
file_nv12_ = fopen("received_nv12_stream.yuv", "w+b");
|
||||
if (!file_nv12_) {
|
||||
LOG_ERROR("Fail to open received_nv12_stream.yuv");
|
||||
}
|
||||
#endif
|
||||
|
||||
if (SAVE_ENCODED_AV1_STREAM) {
|
||||
file_av1_ = fopen("encoded_av1_stream.ivf", "w+b");
|
||||
if (!file_av1_) {
|
||||
LOG_ERROR("Fail to open encoded_av1_stream.ivf");
|
||||
}
|
||||
#ifdef SAVE_ENCODED_AV1_STREAM
|
||||
file_av1_ = fopen("encoded_av1_stream.ivf", "w+b");
|
||||
if (!file_av1_) {
|
||||
LOG_ERROR("Fail to open encoded_av1_stream.ivf");
|
||||
}
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
|
||||
@@ -266,9 +270,9 @@ int AomAv1Encoder::Encode(const XVideoFrame *video_frame,
|
||||
std::function<int(char *encoded_packets, size_t size,
|
||||
VideoFrameType frame_type)>
|
||||
on_encoded_image) {
|
||||
if (SAVE_RECEIVED_NV12_STREAM) {
|
||||
fwrite(video_frame->data, 1, video_frame->size, file_nv12_);
|
||||
}
|
||||
#ifdef SAVE_RECEIVED_NV12_STREAM
|
||||
fwrite(video_frame->data, 1, video_frame->size, file_nv12_);
|
||||
#endif
|
||||
|
||||
aom_codec_err_t ret = AOM_CODEC_OK;
|
||||
|
||||
@@ -293,7 +297,7 @@ int AomAv1Encoder::Encode(const XVideoFrame *video_frame,
|
||||
}
|
||||
|
||||
const uint32_t duration =
|
||||
kRtpTicksPerSecond / static_cast<float>(max_frame_rate_);
|
||||
(uint32_t)(kRtpTicksPerSecond / static_cast<float>(max_frame_rate_));
|
||||
timestamp_ += duration;
|
||||
|
||||
frame_for_encode_->planes[AOM_PLANE_Y] = (unsigned char *)(video_frame->data);
|
||||
@@ -327,7 +331,6 @@ int AomAv1Encoder::Encode(const XVideoFrame *video_frame,
|
||||
}
|
||||
|
||||
aom_codec_iter_t iter = nullptr;
|
||||
int data_pkt_count = 0;
|
||||
while (const aom_codec_cx_pkt_t *pkt =
|
||||
aom_codec_get_cx_data(&aom_av1_encoder_ctx_, &iter)) {
|
||||
if (pkt->kind == AOM_CODEC_CX_FRAME_PKT && pkt->data.frame.sz > 0) {
|
||||
@@ -341,11 +344,9 @@ int AomAv1Encoder::Encode(const XVideoFrame *video_frame,
|
||||
if (on_encoded_image) {
|
||||
on_encoded_image((char *)encoded_frame_, encoded_frame_size_,
|
||||
frame_type);
|
||||
if (SAVE_ENCODED_AV1_STREAM) {
|
||||
fwrite(encoded_frame_, 1, encoded_frame_size_, file_av1_);
|
||||
}
|
||||
} else {
|
||||
OnEncodedImage((char *)encoded_frame_, encoded_frame_size_);
|
||||
#ifdef SAVE_ENCODED_AV1_STREAM
|
||||
fwrite(encoded_frame_, 1, encoded_frame_size_, file_av1_);
|
||||
#endif
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -353,11 +354,6 @@ int AomAv1Encoder::Encode(const XVideoFrame *video_frame,
|
||||
return 0;
|
||||
}
|
||||
|
||||
int AomAv1Encoder::OnEncodedImage(char *encoded_packets, size_t size) {
|
||||
LOG_INFO("OnEncodedImage not implemented");
|
||||
return 0;
|
||||
}
|
||||
|
||||
int AomAv1Encoder::ForceIdr() {
|
||||
force_i_frame_flags_ = AOM_EFLAG_FORCE_KF;
|
||||
return 0;
|
||||
|
||||
@@ -36,20 +36,12 @@ class AomAv1Encoder : public VideoEncoder {
|
||||
|
||||
public:
|
||||
int Init();
|
||||
int Encode(const uint8_t* pData, int nSize,
|
||||
std::function<int(char* encoded_packets, size_t size,
|
||||
VideoFrameType frame_type)>
|
||||
on_encoded_image) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
int Encode(const XVideoFrame* video_frame,
|
||||
std::function<int(char* encoded_packets, size_t size,
|
||||
VideoFrameType frame_type)>
|
||||
on_encoded_image);
|
||||
|
||||
int OnEncodedImage(char* encoded_packets, size_t size);
|
||||
|
||||
int ForceIdr();
|
||||
|
||||
std::string GetEncoderName() { return "AomAV1"; }
|
||||
@@ -65,8 +57,8 @@ class AomAv1Encoder : public VideoEncoder {
|
||||
int Release();
|
||||
|
||||
private:
|
||||
int frame_width_ = 1280;
|
||||
int frame_height_ = 720;
|
||||
uint32_t frame_width_ = 1280;
|
||||
uint32_t frame_height_ = 720;
|
||||
int key_frame_interval_ = 300;
|
||||
int target_bitrate_ = 1000;
|
||||
int max_bitrate_ = 2500000;
|
||||
@@ -91,7 +83,7 @@ class AomAv1Encoder : public VideoEncoder {
|
||||
aom_enc_frame_flags_t force_i_frame_flags_ = 0;
|
||||
uint8_t* encoded_frame_ = nullptr;
|
||||
size_t encoded_frame_capacity_ = 0;
|
||||
int encoded_frame_size_ = 0;
|
||||
size_t encoded_frame_size_ = 0;
|
||||
};
|
||||
|
||||
#endif
|
||||
@@ -6,22 +6,26 @@
|
||||
#include "nvcodec_api.h"
|
||||
#include "nvcodec_common.h"
|
||||
|
||||
#define SAVE_RECEIVED_NV12_STREAM 0
|
||||
#define SAVE_ENCODED_H264_STREAM 0
|
||||
// #define SAVE_RECEIVED_NV12_STREAM
|
||||
// #define SAVE_ENCODED_H264_STREAM
|
||||
|
||||
NvidiaVideoEncoder::NvidiaVideoEncoder() {}
|
||||
NvidiaVideoEncoder::~NvidiaVideoEncoder() {
|
||||
if (SAVE_RECEIVED_NV12_STREAM && file_nv12_) {
|
||||
#ifdef SAVE_RECEIVED_NV12_STREAM
|
||||
if (file_nv12_) {
|
||||
fflush(file_nv12_);
|
||||
fclose(file_nv12_);
|
||||
file_nv12_ = nullptr;
|
||||
}
|
||||
#endif
|
||||
|
||||
if (SAVE_ENCODED_H264_STREAM && file_h264_) {
|
||||
#ifdef SAVE_ENCODED_H264_STREAM
|
||||
if (file_h264_) {
|
||||
fflush(file_h264_);
|
||||
fclose(file_h264_);
|
||||
file_h264_ = nullptr;
|
||||
}
|
||||
#endif
|
||||
|
||||
if (nv12_data_) {
|
||||
free(nv12_data_);
|
||||
@@ -106,19 +110,20 @@ int NvidiaVideoEncoder::Init() {
|
||||
|
||||
encoder_->CreateEncoder(&init_params);
|
||||
|
||||
if (SAVE_RECEIVED_NV12_STREAM) {
|
||||
file_nv12_ = fopen("received_nv12_stream.yuv", "w+b");
|
||||
if (!file_nv12_) {
|
||||
LOG_WARN("Fail to open received_nv12_stream.yuv");
|
||||
}
|
||||
#ifdef SAVE_RECEIVED_NV12_STREAM
|
||||
file_nv12_ = fopen("received_nv12_stream.yuv", "w+b");
|
||||
if (!file_nv12_) {
|
||||
LOG_WARN("Fail to open received_nv12_stream.yuv");
|
||||
}
|
||||
|
||||
if (SAVE_ENCODED_H264_STREAM) {
|
||||
file_h264_ = fopen("encoded_h264_stream.h264", "w+b");
|
||||
if (!file_h264_) {
|
||||
LOG_WARN("Fail to open encoded_h264_stream.h264");
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef SAVE_ENCODED_H264_STREAM
|
||||
file_h264_ = fopen("encoded_h264_stream.h264", "w+b");
|
||||
if (!file_h264_) {
|
||||
LOG_WARN("Fail to open encoded_h264_stream.h264");
|
||||
}
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
|
||||
@@ -133,9 +138,9 @@ int NvidiaVideoEncoder::Encode(
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (SAVE_RECEIVED_NV12_STREAM) {
|
||||
fwrite(video_frame->data, 1, video_frame->size, file_nv12_);
|
||||
}
|
||||
#ifdef SAVE_RECEIVED_NV12_STREAM
|
||||
fwrite(video_frame->data, 1, video_frame->size, file_nv12_);
|
||||
#endif
|
||||
|
||||
if (video_frame->width != frame_width_ ||
|
||||
video_frame->height != frame_height_) {
|
||||
@@ -178,11 +183,9 @@ int NvidiaVideoEncoder::Encode(
|
||||
for (const auto &packet : encoded_packets_) {
|
||||
if (on_encoded_image) {
|
||||
on_encoded_image((char *)packet.data(), packet.size(), frame_type);
|
||||
if (SAVE_ENCODED_H264_STREAM) {
|
||||
fwrite((unsigned char *)packet.data(), 1, packet.size(), file_h264_);
|
||||
}
|
||||
} else {
|
||||
OnEncodedImage((char *)packet.data(), packet.size());
|
||||
#ifdef SAVE_ENCODED_H264_STREAM
|
||||
fwrite((unsigned char *)packet.data(), 1, packet.size(), file_h264_);
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
@@ -196,11 +199,6 @@ int NvidiaVideoEncoder::Encode(
|
||||
return 0;
|
||||
}
|
||||
|
||||
int NvidiaVideoEncoder::OnEncodedImage(char *encoded_packets, size_t size) {
|
||||
LOG_INFO("OnEncodedImage not implemented");
|
||||
return 0;
|
||||
}
|
||||
|
||||
int NvidiaVideoEncoder::ForceIdr() {
|
||||
if (!encoder_) {
|
||||
return -1;
|
||||
|
||||
@@ -12,20 +12,12 @@ class NvidiaVideoEncoder : public VideoEncoder {
|
||||
virtual ~NvidiaVideoEncoder();
|
||||
|
||||
int Init();
|
||||
int Encode(const uint8_t* pData, int nSize,
|
||||
std::function<int(char* encoded_packets, size_t size,
|
||||
VideoFrameType frame_type)>
|
||||
on_encoded_image) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
int Encode(const XVideoFrame* video_frame,
|
||||
std::function<int(char* encoded_packets, size_t size,
|
||||
VideoFrameType frame_type)>
|
||||
on_encoded_image);
|
||||
|
||||
virtual int OnEncodedImage(char* encoded_packets, size_t size);
|
||||
|
||||
int ForceIdr();
|
||||
|
||||
std::string GetEncoderName() { return "NvidiaH264"; }
|
||||
|
||||
@@ -5,17 +5,15 @@
|
||||
#include "libyuv.h"
|
||||
#include "log.h"
|
||||
|
||||
#define SAVE_RECEIVED_NV12_STREAM 0
|
||||
#define SAVE_ENCODED_H264_STREAM 0
|
||||
// #define SAVE_RECEIVED_NV12_STREAM
|
||||
// #define SAVE_ENCODED_H264_STREAM
|
||||
|
||||
void Nv12ToI420(unsigned char *Src_data, int src_width, int src_height,
|
||||
unsigned char *Dst_data) {
|
||||
// NV12 video size
|
||||
int NV12_Size = src_width * src_height * 3 / 2;
|
||||
// NV12
|
||||
int NV12_Y_Size = src_width * src_height;
|
||||
|
||||
// YUV420 video size
|
||||
int I420_Size = src_width * src_height * 3 / 2;
|
||||
// YUV420
|
||||
int I420_Y_Size = src_width * src_height;
|
||||
int I420_U_Size = (src_width >> 1) * (src_height >> 1);
|
||||
int I420_V_Size = I420_U_Size;
|
||||
@@ -29,7 +27,7 @@ void Nv12ToI420(unsigned char *Src_data, int src_width, int src_height,
|
||||
// dst: buffer address of Y channel、U channel and V channel
|
||||
unsigned char *Y_data_Dst = Dst_data;
|
||||
unsigned char *U_data_Dst = Dst_data + I420_Y_Size;
|
||||
unsigned char *V_data_Dst = Dst_data + I420_Y_Size + I420_U_Size;
|
||||
unsigned char *V_data_Dst = Dst_data + I420_Y_Size + I420_V_Size;
|
||||
int Dst_Stride_Y = src_width;
|
||||
int Dst_Stride_U = src_width >> 1;
|
||||
int Dst_Stride_V = Dst_Stride_U;
|
||||
@@ -43,17 +41,21 @@ void Nv12ToI420(unsigned char *Src_data, int src_width, int src_height,
|
||||
OpenH264Encoder::OpenH264Encoder() {}
|
||||
|
||||
OpenH264Encoder::~OpenH264Encoder() {
|
||||
if (SAVE_RECEIVED_NV12_STREAM && file_nv12_) {
|
||||
#ifdef SAVE_RECEIVED_NV12_STREAM
if (file_nv12_) {
fflush(file_nv12_);
fclose(file_nv12_);
file_nv12_ = nullptr;
}
#endif

if (SAVE_ENCODED_H264_STREAM && file_h264_) {
#ifdef SAVE_ENCODED_H264_STREAM
if (file_h264_) {
fflush(file_h264_);
fclose(file_h264_);
file_h264_ = nullptr;
}
#endif

if (yuv420p_frame_) {
delete[] yuv420p_frame_;
@@ -160,19 +162,19 @@ int OpenH264Encoder::Init() {
video_format_ = EVideoFormatType::videoFormatI420;
openh264_encoder_->SetOption(ENCODER_OPTION_DATAFORMAT, &video_format_);

if (SAVE_RECEIVED_NV12_STREAM) {
file_nv12_ = fopen("received_nv12_stream.yuv", "w+b");
if (!file_nv12_) {
LOG_WARN("Fail to open received_nv12_stream.yuv");
}
#ifdef SAVE_RECEIVED_NV12_STREAM
file_nv12_ = fopen("received_nv12_stream.yuv", "w+b");
if (!file_nv12_) {
LOG_WARN("Fail to open received_nv12_stream.yuv");
}
#endif

if (SAVE_ENCODED_H264_STREAM) {
file_h264_ = fopen("encoded_h264_stream.h264", "w+b");
if (!file_h264_) {
LOG_WARN("Fail to open encoded_h264_stream.h264");
}
#ifdef SAVE_ENCODED_H264_STREAM
file_h264_ = fopen("encoded_h264_stream.h264", "w+b");
if (!file_h264_) {
LOG_WARN("Fail to open encoded_h264_stream.h264");
}
#endif

return 0;
}
@@ -187,9 +189,9 @@ int OpenH264Encoder::Encode(
return -1;
}

if (SAVE_RECEIVED_NV12_STREAM) {
fwrite(video_frame->data, 1, video_frame->size, file_nv12_);
}
#ifdef SAVE_RECEIVED_NV12_STREAM
fwrite(video_frame->data, 1, video_frame->size, file_nv12_);
#endif

if (!yuv420p_frame_) {
yuv420p_frame_capacity_ = video_frame->size;
@@ -267,7 +269,7 @@ int OpenH264Encoder::Encode(
}

size_t frag = 0;
int encoded_frame_size = 0;
size_t encoded_frame_size = 0;
for (int layer = 0; layer < info.iLayerNum; ++layer) {
const SLayerBSInfo &layerInfo = info.sLayerInfo[layer];
size_t layer_len = 0;
@@ -281,11 +283,9 @@ int OpenH264Encoder::Encode(

if (on_encoded_image) {
on_encoded_image((char *)encoded_frame_, encoded_frame_size_, frame_type);
if (SAVE_ENCODED_H264_STREAM) {
fwrite(encoded_frame_, 1, encoded_frame_size_, file_h264_);
}
} else {
OnEncodedImage((char *)encoded_frame_, encoded_frame_size_);
#ifdef SAVE_ENCODED_H264_STREAM
fwrite(encoded_frame_, 1, encoded_frame_size_, file_h264_);
#endif
}
#else
if (info.eFrameType == videoFrameTypeInvalid) {
@@ -327,11 +327,9 @@ int OpenH264Encoder::Encode(

if (on_encoded_image) {
on_encoded_image((char *)encoded_frame_, frame_type);
if (SAVE_ENCODED_H264_STREAM) {
fwrite(encoded_frame_, 1, encoded_frame_size_, file_h264_);
}
} else {
OnEncodedImage((char *)encoded_frame_, encoded_frame_size_);
#ifdef SAVE_ENCODED_H264_STREAM
fwrite(encoded_frame_, 1, encoded_frame_size_, file_h264_);
#endif
}

EVideoFrameType ft_temp = info.eFrameType;
@@ -353,11 +351,6 @@ int OpenH264Encoder::Encode(
return 0;
}

int OpenH264Encoder::OnEncodedImage(char *encoded_packets, size_t size) {
LOG_INFO("OnEncodedImage not implemented");
return 0;
}

int OpenH264Encoder::ForceIdr() {
if (openh264_encoder_) {
return openh264_encoder_->ForceIntraFrame(true);

@@ -23,20 +23,12 @@ class OpenH264Encoder : public VideoEncoder {
virtual ~OpenH264Encoder();

int Init();
int Encode(const uint8_t* pData, int nSize,
std::function<int(char* encoded_packets, size_t size,
VideoFrameType frame_type)>
on_encoded_image) {
return 0;
}

int Encode(const XVideoFrame* video_frame,
std::function<int(char* encoded_packets, size_t size,
VideoFrameType frame_type)>
on_encoded_image);

int OnEncodedImage(char* encoded_packets, size_t size);

int ForceIdr();

std::string GetEncoderName() { return "OpenH264"; }
@@ -48,8 +40,8 @@ class OpenH264Encoder : public VideoEncoder {
int Release();

private:
int frame_width_ = 1280;
int frame_height_ = 720;
uint32_t frame_width_ = 1280;
uint32_t frame_height_ = 720;
int key_frame_interval_ = 300;
int target_bitrate_ = 10000000;
int max_bitrate_ = 10000000;
@@ -68,10 +60,10 @@ class OpenH264Encoder : public VideoEncoder {
int video_format_;
SSourcePicture raw_frame_;
unsigned char* yuv420p_frame_ = nullptr;
int yuv420p_frame_capacity_ = 0;
size_t yuv420p_frame_capacity_ = 0;
uint8_t* encoded_frame_ = nullptr;
int encoded_frame_capacity_ = 0;
int encoded_frame_size_ = 0;
size_t encoded_frame_capacity_ = 0;
size_t encoded_frame_size_ = 0;
bool got_output = false;
bool is_keyframe = false;
int temporal_ = 1;

@@ -20,18 +20,11 @@ class VideoEncoder {
public:
virtual int Init() = 0;

virtual int Encode(const uint8_t* pData, int nSize,
std::function<int(char* encoded_packets, size_t size,
VideoFrameType frame_type)>
on_encoded_image) = 0;

virtual int Encode(const XVideoFrame* video_frame,
std::function<int(char* encoded_packets, size_t size,
VideoFrameType frame_type)>
on_encoded_image) = 0;

virtual int OnEncodedImage(char* encoded_packets, size_t size) = 0;

virtual int ForceIdr() = 0;

virtual std::string GetEncoderName() = 0;
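
The Init()/Encode() hunks above replace the runtime if (SAVE_RECEIVED_NV12_STREAM) { ... } / if (SAVE_ENCODED_H264_STREAM) { ... } checks with compile-time #ifdef guards, so the debug dump-to-file code is compiled out entirely when the macros are not defined and the associated file handles no longer produce unused-variable warnings. A minimal sketch of the pattern, not taken from the repository; the macro and file names mirror the hunks above, and the build system is assumed to define SAVE_ENCODED_H264_STREAM only for dump-enabled builds:

// Sketch only: compile-time guard for a debug bitstream dump.
// SAVE_ENCODED_H264_STREAM is assumed to come from the build system.
#include <cstdint>
#include <cstdio>

#ifdef SAVE_ENCODED_H264_STREAM
static FILE *file_h264_ = nullptr;
#endif

void DumpEncodedChunk([[maybe_unused]] const uint8_t *data,
                      [[maybe_unused]] size_t size) {
#ifdef SAVE_ENCODED_H264_STREAM
  if (!file_h264_) {
    file_h264_ = fopen("encoded_h264_stream.h264", "w+b");
  }
  if (file_h264_) {
    fwrite(data, 1, size, file_h264_);  // only present in dump-enabled builds
  }
#endif
}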
src/qos/kcp/ikcp.c: 1306 changed lines; file diff suppressed because it is too large.
@@ -1,416 +0,0 @@
//=====================================================================
//
// KCP - A Better ARQ Protocol Implementation
// skywind3000 (at) gmail.com, 2010-2011
//
// Features:
// + Average RTT reduce 30% - 40% vs traditional ARQ like tcp.
// + Maximum RTT reduce three times vs tcp.
// + Lightweight, distributed as a single source file.
//
//=====================================================================
#ifndef __IKCP_H__
#define __IKCP_H__

#include <stddef.h>
#include <stdlib.h>
#include <assert.h>


//=====================================================================
// 32BIT INTEGER DEFINITION
//=====================================================================
#ifndef __INTEGER_32_BITS__
#define __INTEGER_32_BITS__
#if defined(_WIN64) || defined(WIN64) || defined(__amd64__) || \
defined(__x86_64) || defined(__x86_64__) || defined(_M_IA64) || \
defined(_M_AMD64)
typedef unsigned int ISTDUINT32;
typedef int ISTDINT32;
#elif defined(_WIN32) || defined(WIN32) || defined(__i386__) || \
defined(__i386) || defined(_M_X86)
typedef unsigned long ISTDUINT32;
typedef long ISTDINT32;
#elif defined(__MACOS__)
typedef UInt32 ISTDUINT32;
typedef SInt32 ISTDINT32;
#elif defined(__APPLE__) && defined(__MACH__)
#include <sys/types.h>
typedef u_int32_t ISTDUINT32;
typedef int32_t ISTDINT32;
#elif defined(__BEOS__)
#include <sys/inttypes.h>
typedef u_int32_t ISTDUINT32;
typedef int32_t ISTDINT32;
#elif (defined(_MSC_VER) || defined(__BORLANDC__)) && (!defined(__MSDOS__))
typedef unsigned __int32 ISTDUINT32;
typedef __int32 ISTDINT32;
#elif defined(__GNUC__)
#include <stdint.h>
typedef uint32_t ISTDUINT32;
typedef int32_t ISTDINT32;
#else
typedef unsigned long ISTDUINT32;
typedef long ISTDINT32;
#endif
#endif


//=====================================================================
// Integer Definition
//=====================================================================
#ifndef __IINT8_DEFINED
#define __IINT8_DEFINED
typedef char IINT8;
#endif

#ifndef __IUINT8_DEFINED
#define __IUINT8_DEFINED
typedef unsigned char IUINT8;
#endif

#ifndef __IUINT16_DEFINED
#define __IUINT16_DEFINED
typedef unsigned short IUINT16;
#endif

#ifndef __IINT16_DEFINED
#define __IINT16_DEFINED
typedef short IINT16;
#endif

#ifndef __IINT32_DEFINED
#define __IINT32_DEFINED
typedef ISTDINT32 IINT32;
#endif

#ifndef __IUINT32_DEFINED
#define __IUINT32_DEFINED
typedef ISTDUINT32 IUINT32;
#endif

#ifndef __IINT64_DEFINED
#define __IINT64_DEFINED
#if defined(_MSC_VER) || defined(__BORLANDC__)
typedef __int64 IINT64;
#else
typedef long long IINT64;
#endif
#endif

#ifndef __IUINT64_DEFINED
#define __IUINT64_DEFINED
#if defined(_MSC_VER) || defined(__BORLANDC__)
typedef unsigned __int64 IUINT64;
#else
typedef unsigned long long IUINT64;
#endif
#endif

#ifndef INLINE
#if defined(__GNUC__)

#if (__GNUC__ > 3) || ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 1))
#define INLINE __inline__ __attribute__((always_inline))
#else
#define INLINE __inline__
#endif

#elif (defined(_MSC_VER) || defined(__BORLANDC__) || defined(__WATCOMC__))
#define INLINE __inline
#else
#define INLINE
#endif
#endif

#if (!defined(__cplusplus)) && (!defined(inline))
#define inline INLINE
#endif


//=====================================================================
// QUEUE DEFINITION
//=====================================================================
#ifndef __IQUEUE_DEF__
#define __IQUEUE_DEF__

struct IQUEUEHEAD {
struct IQUEUEHEAD *next, *prev;
};

typedef struct IQUEUEHEAD iqueue_head;


//---------------------------------------------------------------------
// queue init
//---------------------------------------------------------------------
#define IQUEUE_HEAD_INIT(name) { &(name), &(name) }
#define IQUEUE_HEAD(name) \
struct IQUEUEHEAD name = IQUEUE_HEAD_INIT(name)

#define IQUEUE_INIT(ptr) ( \
(ptr)->next = (ptr), (ptr)->prev = (ptr))

#define IOFFSETOF(TYPE, MEMBER) ((size_t) &((TYPE *)0)->MEMBER)

#define ICONTAINEROF(ptr, type, member) ( \
(type*)( ((char*)((type*)ptr)) - IOFFSETOF(type, member)) )

#define IQUEUE_ENTRY(ptr, type, member) ICONTAINEROF(ptr, type, member)


//---------------------------------------------------------------------
// queue operation
//---------------------------------------------------------------------
#define IQUEUE_ADD(node, head) ( \
(node)->prev = (head), (node)->next = (head)->next, \
(head)->next->prev = (node), (head)->next = (node))

#define IQUEUE_ADD_TAIL(node, head) ( \
(node)->prev = (head)->prev, (node)->next = (head), \
(head)->prev->next = (node), (head)->prev = (node))

#define IQUEUE_DEL_BETWEEN(p, n) ((n)->prev = (p), (p)->next = (n))

#define IQUEUE_DEL(entry) (\
(entry)->next->prev = (entry)->prev, \
(entry)->prev->next = (entry)->next, \
(entry)->next = 0, (entry)->prev = 0)

#define IQUEUE_DEL_INIT(entry) do { \
IQUEUE_DEL(entry); IQUEUE_INIT(entry); } while (0)

#define IQUEUE_IS_EMPTY(entry) ((entry) == (entry)->next)

#define iqueue_init IQUEUE_INIT
#define iqueue_entry IQUEUE_ENTRY
#define iqueue_add IQUEUE_ADD
#define iqueue_add_tail IQUEUE_ADD_TAIL
#define iqueue_del IQUEUE_DEL
#define iqueue_del_init IQUEUE_DEL_INIT
#define iqueue_is_empty IQUEUE_IS_EMPTY

#define IQUEUE_FOREACH(iterator, head, TYPE, MEMBER) \
for ((iterator) = iqueue_entry((head)->next, TYPE, MEMBER); \
&((iterator)->MEMBER) != (head); \
(iterator) = iqueue_entry((iterator)->MEMBER.next, TYPE, MEMBER))

#define iqueue_foreach(iterator, head, TYPE, MEMBER) \
IQUEUE_FOREACH(iterator, head, TYPE, MEMBER)

#define iqueue_foreach_entry(pos, head) \
for( (pos) = (head)->next; (pos) != (head) ; (pos) = (pos)->next )


#define __iqueue_splice(list, head) do { \
iqueue_head *first = (list)->next, *last = (list)->prev; \
iqueue_head *at = (head)->next; \
(first)->prev = (head), (head)->next = (first); \
(last)->next = (at), (at)->prev = (last); } while (0)

#define iqueue_splice(list, head) do { \
if (!iqueue_is_empty(list)) __iqueue_splice(list, head); } while (0)

#define iqueue_splice_init(list, head) do { \
iqueue_splice(list, head); iqueue_init(list); } while (0)


#ifdef _MSC_VER
#pragma warning(disable:4311)
#pragma warning(disable:4312)
#pragma warning(disable:4996)
#endif

#endif


//---------------------------------------------------------------------
// BYTE ORDER & ALIGNMENT
//---------------------------------------------------------------------
#ifndef IWORDS_BIG_ENDIAN
#ifdef _BIG_ENDIAN_
#if _BIG_ENDIAN_
#define IWORDS_BIG_ENDIAN 1
#endif
#endif
#ifndef IWORDS_BIG_ENDIAN
#if defined(__hppa__) || \
defined(__m68k__) || defined(mc68000) || defined(_M_M68K) || \
(defined(__MIPS__) && defined(__MIPSEB__)) || \
defined(__ppc__) || defined(__POWERPC__) || defined(_M_PPC) || \
defined(__sparc__) || defined(__powerpc__) || \
defined(__mc68000__) || defined(__s390x__) || defined(__s390__)
#define IWORDS_BIG_ENDIAN 1
#endif
#endif
#ifndef IWORDS_BIG_ENDIAN
#define IWORDS_BIG_ENDIAN 0
#endif
#endif

#ifndef IWORDS_MUST_ALIGN
#if defined(__i386__) || defined(__i386) || defined(_i386_)
#define IWORDS_MUST_ALIGN 0
#elif defined(_M_IX86) || defined(_X86_) || defined(__x86_64__)
#define IWORDS_MUST_ALIGN 0
#elif defined(__amd64) || defined(__amd64__)
#define IWORDS_MUST_ALIGN 0
#else
#define IWORDS_MUST_ALIGN 1
#endif
#endif


//=====================================================================
// SEGMENT
//=====================================================================
struct IKCPSEG
{
struct IQUEUEHEAD node;
IUINT32 conv;
IUINT32 cmd;
IUINT32 frg;
IUINT32 wnd;
IUINT32 ts;
IUINT32 sn;
IUINT32 una;
IUINT32 len;
IUINT32 resendts;
IUINT32 rto;
IUINT32 fastack;
IUINT32 xmit;
char data[1];
};


//---------------------------------------------------------------------
// IKCPCB
//---------------------------------------------------------------------
struct IKCPCB
{
IUINT32 conv, mtu, mss, state;
IUINT32 snd_una, snd_nxt, rcv_nxt;
IUINT32 ts_recent, ts_lastack, ssthresh;
IINT32 rx_rttval, rx_srtt, rx_rto, rx_minrto;
IUINT32 snd_wnd, rcv_wnd, rmt_wnd, cwnd, probe;
IUINT32 current, interval, ts_flush, xmit;
IUINT32 nrcv_buf, nsnd_buf;
IUINT32 nrcv_que, nsnd_que;
IUINT32 nodelay, updated;
IUINT32 ts_probe, probe_wait;
IUINT32 dead_link, incr;
struct IQUEUEHEAD snd_queue;
struct IQUEUEHEAD rcv_queue;
struct IQUEUEHEAD snd_buf;
struct IQUEUEHEAD rcv_buf;
IUINT32 *acklist;
IUINT32 ackcount;
IUINT32 ackblock;
void *user;
char *buffer;
int fastresend;
int fastlimit;
int nocwnd, stream;
int logmask;
int (*output)(const char *buf, int len, struct IKCPCB *kcp, void *user);
void (*writelog)(const char *log, struct IKCPCB *kcp, void *user);
};


typedef struct IKCPCB ikcpcb;

#define IKCP_LOG_OUTPUT 1
#define IKCP_LOG_INPUT 2
#define IKCP_LOG_SEND 4
#define IKCP_LOG_RECV 8
#define IKCP_LOG_IN_DATA 16
#define IKCP_LOG_IN_ACK 32
#define IKCP_LOG_IN_PROBE 64
#define IKCP_LOG_IN_WINS 128
#define IKCP_LOG_OUT_DATA 256
#define IKCP_LOG_OUT_ACK 512
#define IKCP_LOG_OUT_PROBE 1024
#define IKCP_LOG_OUT_WINS 2048

#ifdef __cplusplus
extern "C" {
#endif

//---------------------------------------------------------------------
// interface
//---------------------------------------------------------------------

// create a new kcp control object, 'conv' must equal in two endpoint
// from the same connection. 'user' will be passed to the output callback
// output callback can be setup like this: 'kcp->output = my_udp_output'
ikcpcb* ikcp_create(IUINT32 conv, void *user);

// release kcp control object
void ikcp_release(ikcpcb *kcp);

// set output callback, which will be invoked by kcp
void ikcp_setoutput(ikcpcb *kcp, int (*output)(const char *buf, int len,
ikcpcb *kcp, void *user));

// user/upper level recv: returns size, returns below zero for EAGAIN
int ikcp_recv(ikcpcb *kcp, char *buffer, int len);

// user/upper level send, returns below zero for error
int ikcp_send(ikcpcb *kcp, const char *buffer, int len);

// update state (call it repeatedly, every 10ms-100ms), or you can ask
// ikcp_check when to call it again (without ikcp_input/_send calling).
// 'current' - current timestamp in millisec.
void ikcp_update(ikcpcb *kcp, IUINT32 current);

// Determine when should you invoke ikcp_update:
// returns when you should invoke ikcp_update in millisec, if there
// is no ikcp_input/_send calling. you can call ikcp_update in that
// time, instead of call update repeatly.
// Important to reduce unnacessary ikcp_update invoking. use it to
// schedule ikcp_update (eg. implementing an epoll-like mechanism,
// or optimize ikcp_update when handling massive kcp connections)
IUINT32 ikcp_check(const ikcpcb *kcp, IUINT32 current);

// when you received a low level packet (eg. UDP packet), call it
int ikcp_input(ikcpcb *kcp, const char *data, long size);

// flush pending data
void ikcp_flush(ikcpcb *kcp);

// check the size of next message in the recv queue
int ikcp_peeksize(const ikcpcb *kcp);

// change MTU size, default is 1400
int ikcp_setmtu(ikcpcb *kcp, int mtu);

// set maximum window size: sndwnd=32, rcvwnd=32 by default
int ikcp_wndsize(ikcpcb *kcp, int sndwnd, int rcvwnd);

// get how many packet is waiting to be sent
int ikcp_waitsnd(const ikcpcb *kcp);

// fastest: ikcp_nodelay(kcp, 1, 20, 2, 1)
// nodelay: 0:disable(default), 1:enable
// interval: internal update timer interval in millisec, default is 100ms
// resend: 0:disable fast resend(default), 1:enable fast resend
// nc: 0:normal congestion control(default), 1:disable congestion control
int ikcp_nodelay(ikcpcb *kcp, int nodelay, int interval, int resend, int nc);


void ikcp_log(ikcpcb *kcp, int mask, const char *fmt, ...);

// setup allocator
void ikcp_allocator(void* (*new_malloc)(size_t), void (*new_free)(void*));

// read conv
IUINT32 ikcp_getconv(const void *ptr);


#ifdef __cplusplus
}
#endif

#endif

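The interface comments in the removed ikcp.h above already describe how KCP is meant to be driven: create a control block whose 'conv' matches on both endpoints, install an output callback that hands segments to the real socket, feed received UDP packets to ikcp_input, and clock the protocol with ikcp_update (or schedule it with ikcp_check). A minimal sketch of that loop, assuming hypothetical udp_send()/udp_recv()/now_ms() helpers that are not part of ikcp.h:

// Sketch of the call sequence documented in ikcp.h; the udp_* and now_ms()
// helpers are hypothetical placeholders for the application's socket layer.
#include "ikcp.h"

extern int udp_send(void *sock, const char *buf, int len);  // hypothetical
extern int udp_recv(void *sock, char *buf, int len);        // hypothetical
extern IUINT32 now_ms();                                    // hypothetical

static int udp_output(const char *buf, int len, ikcpcb *kcp, void *user) {
  (void)kcp;
  return udp_send(user, buf, len);  // hand the KCP segment to the real socket
}

void RunKcpSession(void *sock) {
  ikcpcb *kcp = ikcp_create(0x11223344, sock);  // conv must match the peer
  ikcp_setoutput(kcp, udp_output);
  ikcp_nodelay(kcp, 1, 20, 2, 1);  // "fastest" profile from the comment above
  ikcp_wndsize(kcp, 128, 128);

  char buf[2048];
  for (;;) {
    ikcp_update(kcp, now_ms());  // call every 10-100 ms, or schedule via ikcp_check()
    int n;
    while ((n = udp_recv(sock, buf, (int)sizeof(buf))) > 0) {
      ikcp_input(kcp, buf, n);  // feed raw UDP packets to KCP
    }
    while ((n = ikcp_recv(kcp, buf, (int)sizeof(buf))) > 0) {
      ikcp_send(kcp, buf, n);  // echo the reassembled message back, as an example
    }
  }
  // ikcp_release(kcp);  // unreachable in this endless sketch
}
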
@@ -6,7 +6,6 @@
#include <thread>

#include "common.h"
#include "ikcp.h"
#include "log.h"
#if __APPLE__
#else
@@ -49,7 +48,7 @@ int IceTransmission::SetLocalCapabilities(
hardware_acceleration_ = hardware_acceleration;
use_trickle_ice_ = use_trickle_ice;
use_reliable_ice_ = use_reliable_ice;
enable_turn_ = force_turn;
enable_turn_ = enable_turn;
force_turn_ = force_turn;
support_video_payload_types_ = video_payload_types;
support_audio_payload_types_ = audio_payload_types;
@@ -105,10 +104,9 @@ int IceTransmission::InitIceTransmission(
});
rtp_video_receiver_->SetOnReceiveCompleteFrame(
[this](VideoFrame &video_frame) -> void {
// LOG_ERROR("OnReceiveCompleteFrame {}", video_frame.Size());
ice_io_statistics_->UpdateVideoInboundBytes(video_frame.Size());

int num_frame_returned = video_decoder_->Decode(
ice_io_statistics_->UpdateVideoInboundBytes(
(uint32_t)video_frame.Size());
[[maybe_unused]] int num_frame_returned = video_decoder_->Decode(
(uint8_t *)video_frame.Buffer(), video_frame.Size(),
[this](VideoFrame video_frame) {
if (on_receive_video_) {
@@ -140,7 +138,7 @@ int IceTransmission::InitIceTransmission(
return -2;
}

ice_io_statistics_->UpdateVideoOutboundBytes(size);
ice_io_statistics_->UpdateVideoOutboundBytes((uint32_t)size);
return ice_agent_->Send(data, size);
});

@@ -166,9 +164,9 @@ int IceTransmission::InitIceTransmission(
});
rtp_audio_receiver_->SetOnReceiveData([this](const char *data,
size_t size) -> void {
ice_io_statistics_->UpdateAudioInboundBytes(size);
ice_io_statistics_->UpdateAudioInboundBytes((uint32_t)size);

int num_frame_returned = audio_decoder_->Decode(
[[maybe_unused]] int num_frame_returned = audio_decoder_->Decode(
(uint8_t *)data, size, [this](uint8_t *data, int size) {
if (on_receive_audio_) {
on_receive_audio_((const char *)data, size, remote_user_id_.data(),
@@ -192,7 +190,7 @@ int IceTransmission::InitIceTransmission(
return -2;
}

ice_io_statistics_->UpdateAudioOutboundBytes(size);
ice_io_statistics_->UpdateAudioOutboundBytes((uint32_t)size);
return ice_agent_->Send(data, size);
});

@@ -218,7 +216,7 @@ int IceTransmission::InitIceTransmission(
});
rtp_data_receiver_->SetOnReceiveData(
[this](const char *data, size_t size) -> void {
ice_io_statistics_->UpdateDataInboundBytes(size);
ice_io_statistics_->UpdateDataInboundBytes((uint32_t)size);

if (on_receive_data_) {
on_receive_data_(data, size, remote_user_id_.data(),
@@ -241,7 +239,7 @@ int IceTransmission::InitIceTransmission(
return -2;
}

ice_io_statistics_->UpdateDataOutboundBytes(size);
ice_io_statistics_->UpdateDataOutboundBytes((uint32_t)size);
return ice_agent_->Send(data, size);
});

@@ -253,8 +251,9 @@ int IceTransmission::InitIceTransmission(
turn_password);

ice_agent_->CreateIceAgent(
[](NiceAgent *agent, guint stream_id, guint component_id,
NiceComponentState state, gpointer user_ptr) {
[]([[maybe_unused]] NiceAgent *agent, [[maybe_unused]] guint stream_id,
[[maybe_unused]] guint component_id, NiceComponentState state,
gpointer user_ptr) {
if (user_ptr) {
IceTransmission *ice_transmission_obj =
static_cast<IceTransmission *>(user_ptr);
@@ -313,7 +312,8 @@ int IceTransmission::InitIceTransmission(
}
}
},
[](NiceAgent *agent, guint stream_id, gpointer user_ptr) {
[]([[maybe_unused]] NiceAgent *agent, [[maybe_unused]] guint stream_id,
gpointer user_ptr) {
// non-trickle
if (user_ptr) {
IceTransmission *ice_transmission_obj =
@@ -365,8 +365,9 @@ int IceTransmission::InitIceTransmission(
&net_traffic_stats, ice_transmission_obj->user_data_);
}
},
[](NiceAgent *agent, guint stream_id, guint component_id, guint size,
gchar *buffer, gpointer user_ptr) {
[]([[maybe_unused]] NiceAgent *agent, [[maybe_unused]] guint stream_id,
[[maybe_unused]] guint component_id, guint size, gchar *buffer,
gpointer user_ptr) {
if (user_ptr) {
IceTransmission *ice_transmission_obj =
static_cast<IceTransmission *>(user_ptr);
@@ -977,7 +978,7 @@ int IceTransmission::SendVideoFrame(const XVideoFrame *video_frame) {
if (video_rtp_codec_) {
video_rtp_codec_->Encode(
static_cast<RtpCodec::VideoFrameType>(frame_type),
(uint8_t *)encoded_frame, size, packets);
(uint8_t *)encoded_frame, (uint32_t)size, packets);
}
rtp_video_sender_->Enqueue(packets);
}
@@ -1007,15 +1008,15 @@ int IceTransmission::SendAudioFrame(const char *data, size_t size) {
if (rtp_audio_sender_) {
if (audio_rtp_codec_) {
std::vector<RtpPacket> packets;
audio_rtp_codec_->Encode((uint8_t *)encoded_audio_buffer, size,
packets);
audio_rtp_codec_->Encode((uint8_t *)encoded_audio_buffer,
(uint32_t)size, packets);
rtp_audio_sender_->Enqueue(packets);
}
}
return 0;
});

return 0;
return ret;
}

int IceTransmission::SendDataFrame(const char *data, size_t size) {
@@ -1030,7 +1031,7 @@ int IceTransmission::SendDataFrame(const char *data, size_t size) {

if (rtp_data_sender_) {
if (data_rtp_codec_) {
data_rtp_codec_->Encode((uint8_t *)data, size, packets);
data_rtp_codec_->Encode((uint8_t *)data, (uint32_t)size, packets);
rtp_data_sender_->Enqueue(packets);
}
}

@@ -136,7 +136,8 @@ void WsClient::Ping(websocketpp::connection_hdl hdl) {

WsStatus WsClient::GetStatus() { return ws_status_; }

void WsClient::OnOpen(client *c, websocketpp::connection_hdl hdl) {
void WsClient::OnOpen([[maybe_unused]] client *c,
websocketpp::connection_hdl hdl) {
ws_status_ = WsStatus::WsOpened;
on_ws_status_(WsStatus::WsOpened);

@@ -155,13 +156,15 @@ void WsClient::OnOpen(client *c, websocketpp::connection_hdl hdl) {
}
}

void WsClient::OnFail(client *c, websocketpp::connection_hdl hdl) {
void WsClient::OnFail([[maybe_unused]] client *c,
websocketpp::connection_hdl hdl) {
ws_status_ = WsStatus::WsFailed;
on_ws_status_(WsStatus::WsFailed);
Connect(uri_);
}

void WsClient::OnClose(client *c, websocketpp::connection_hdl hdl) {
void WsClient::OnClose([[maybe_unused]] client *c,
websocketpp::connection_hdl hdl) {
ws_status_ = WsStatus::WsServerClosed;
on_ws_status_(WsStatus::WsServerClosed);
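
The WsClient and IceTransmission hunks apply the same two fixes throughout the commit: callback parameters that the signature requires but the body never reads are annotated with the C++17 [[maybe_unused]] attribute, and size_t byte counts are narrowed to the 32-bit statistics counters with an explicit cast so the conversion is deliberate rather than implicit. A small self-contained illustration of both, with a hypothetical UpdateBytes() counter standing in for the statistics methods used above:

// Illustration only; UpdateBytes() is a hypothetical stand-in for the
// UpdateXxxInboundBytes/UpdateXxxOutboundBytes(uint32_t) counters above.
#include <cstddef>
#include <cstdint>

static uint64_t total_bytes = 0;
void UpdateBytes(uint32_t bytes) { total_bytes += bytes; }

// [[maybe_unused]] silences -Wunused-parameter for parameters kept only to
// satisfy the callback signature; static_cast makes the size_t -> uint32_t
// narrowing visible instead of leaving it implicit.
void OnReceive([[maybe_unused]] void *channel,
               [[maybe_unused]] const char *data, size_t size) {
  UpdateBytes(static_cast<uint32_t>(size));
}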