Use SDL2 to display BGRA frames

dijunkun
2023-08-30 00:29:08 +08:00
parent dcd5273cf6
commit 1bef5a1cab
2 changed files with 56 additions and 32 deletions
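
What the commit does, in short: the WGC capture callback now hands raw BGRA frames to the receiver, the receiver converts them to NV12 with libswscale, and SDL2 renders the result through a streaming NV12 texture. The conversion step boils down to a single sws_scale() call. Below is a minimal, self-contained sketch of that step; the function name and parameters are illustrative, not taken from the repository, and no AVFrame is needed when both buffers are already allocated:

#include <cstdint>
extern "C" {
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
}

// Convert a packed BGRA frame to NV12, scaling to dst_w x dst_h.
// nv12 must hold at least dst_w * dst_h * 3 / 2 bytes.
int ConvertBGRAToNV12(const uint8_t *bgra, int src_w, int src_h, uint8_t *nv12,
                      int dst_w, int dst_h) {
  SwsContext *ctx =
      sws_getContext(src_w, src_h, AV_PIX_FMT_BGRA, dst_w, dst_h,
                     AV_PIX_FMT_NV12, SWS_FAST_BILINEAR, nullptr, nullptr,
                     nullptr);
  if (!ctx) return -1;

  uint8_t *src_data[4], *dst_data[4];
  int src_linesize[4], dst_linesize[4];
  av_image_fill_arrays(src_data, src_linesize, bgra, AV_PIX_FMT_BGRA, src_w,
                       src_h, 1);
  av_image_fill_arrays(dst_data, dst_linesize, nv12, AV_PIX_FMT_NV12, dst_w,
                       dst_h, 1);

  sws_scale(ctx, src_data, src_linesize, 0, src_h, dst_data, dst_linesize);
  sws_freeContext(ctx);
  return 0;
}

Unlike the helper added in the diff below, this sketch frees the context it creates on every call; for a per-frame path it would be cheaper to cache the SwsContext (for example with sws_getCachedContext) instead of recreating it each frame.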

@@ -17,7 +17,7 @@ extern "C" {
#include "SDL2/SDL.h"
int screen_w = 2560, screen_h = 1440;
const int pixel_w = 2560, pixel_h = 1440;
const int pixel_w = 1280, pixel_h = 720;
unsigned char buffer[pixel_w * pixel_h * 3 / 2];
unsigned char dst_buffer[pixel_w * pixel_h * 3 / 2];
@@ -65,23 +65,48 @@ int YUV420ToNV12FFmpeg(unsigned char *src_buffer, int width, int height,
  return 0;
}
int BGRAToNV12FFmpeg(unsigned char *src_buffer, int width, int height,
                     unsigned char *dst_buffer) {
  AVFrame *Input_pFrame = av_frame_alloc();
  AVFrame *Output_pFrame = av_frame_alloc();
  struct SwsContext *img_convert_ctx =
      sws_getContext(width, height, AV_PIX_FMT_BGRA, 1280, 720, AV_PIX_FMT_NV12,
                     SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
  av_image_fill_arrays(Input_pFrame->data, Input_pFrame->linesize, src_buffer,
                       AV_PIX_FMT_BGRA, width, height, 1);
  av_image_fill_arrays(Output_pFrame->data, Output_pFrame->linesize, dst_buffer,
                       AV_PIX_FMT_NV12, 1280, 720, 1);
  sws_scale(img_convert_ctx, (uint8_t const **)Input_pFrame->data,
            Input_pFrame->linesize, 0, height, Output_pFrame->data,
            Output_pFrame->linesize);
  if (Input_pFrame) av_free(Input_pFrame);
  if (Output_pFrame) av_free(Output_pFrame);
  if (img_convert_ctx) sws_freeContext(img_convert_ctx);
  return 0;
}
void OnFrame(unsigned char *data, int size, int width, int height) {
  std::cout << "Receive frame: w:" << width << " h:" << height
            << " size:" << size << std::endl;
  // YUV420ToNV12FFmpeg(data, width, height, dst_buffer);
  BGRAToNV12FFmpeg(data, width, height, dst_buffer);
  // SDL_UpdateTexture(sdlTexture, NULL, data, pixel_w);
  memcpy(rgbData, data, width * height);
  SDL_UpdateTexture(sdlTexture, NULL, dst_buffer, pixel_w);
  // memcpy(rgbData, data, width * height);
  // FIX: if the window is resized
  // sdlRect.x = 0;
  // sdlRect.y = 0;
  // sdlRect.w = screen_w;
  // sdlRect.h = screen_h;
  sdlRect.x = 0;
  sdlRect.y = 0;
  sdlRect.w = screen_w;
  sdlRect.h = screen_h;
  // SDL_RenderClear(sdlRenderer);
  // SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, &sdlRect);
  // SDL_RenderPresent(sdlRenderer);
  SDL_RenderClear(sdlRenderer);
  SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, &sdlRect);
  SDL_RenderPresent(sdlRenderer);
}
int main() {
@@ -93,7 +118,7 @@ int main() {
  rect.right = GetSystemMetrics(SM_CXSCREEN);
  rect.bottom = GetSystemMetrics(SM_CYSCREEN);
  recorder->Init(rect, 10, OnFrame);
  recorder->Init(rect, 60, OnFrame);
  recorder->Start();
@@ -104,7 +129,7 @@ int main() {
  SDL_Window *screen;
  screen = SDL_CreateWindow("RTS Receiver", SDL_WINDOWPOS_UNDEFINED,
                            SDL_WINDOWPOS_UNDEFINED, screen_w, screen_h,
                            SDL_WINDOWPOS_UNDEFINED, screen_w / 2, screen_h / 2,
                            SDL_WINDOW_RESIZABLE);
  if (!screen) {
    printf("SDL: could not create window - exiting:%s\n", SDL_GetError());
@@ -115,15 +140,14 @@ int main() {
  Uint32 pixformat = 0;
  pixformat = SDL_PIXELFORMAT_NV12;
  // sdlTexture = SDL_CreateTexture(sdlRenderer, pixformat,
  //                                SDL_TEXTUREACCESS_STREAMING, pixel_w,
  //                                pixel_h);
  sdlTexture = SDL_CreateTexture(sdlRenderer, pixformat,
                                 SDL_TEXTUREACCESS_STREAMING, 1280, 720);
  SDL_Surface *surface =
      SDL_CreateRGBSurfaceFrom(rgbData, pixel_w, pixel_h, 24, pixel_w * 3,
                               0x000000FF, 0x0000FF00, 0x00FF0000, 0);
  // SDL_Surface *surface =
  //     SDL_CreateRGBSurfaceFrom(rgbData, pixel_w, pixel_h, 24, pixel_w * 3,
  //                              0x000000FF, 0x0000FF00, 0x00FF0000, 0);
  sdlTexture = SDL_CreateTextureFromSurface(sdlRenderer, surface);
  // sdlTexture = SDL_CreateTextureFromSurface(sdlRenderer, surface);
  SDL_Thread *refresh_thread = SDL_CreateThread(refresh_video, NULL, NULL);
  SDL_Event event;
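
The display side of the change is the standard SDL2 streaming-texture path: create one NV12 texture at the converted resolution, then upload and present each converted frame. A rough sketch under the same assumptions as above (1280x720 NV12, tightly packed; the function name below is illustrative):

#include <SDL2/SDL.h>

// Upload one tightly packed NV12 frame and present it.
// For NV12, the pitch passed to SDL_UpdateTexture is the Y-plane pitch,
// which equals the frame width in bytes when there is no row padding.
void PresentNV12Frame(SDL_Renderer *renderer, SDL_Texture *texture,
                      const unsigned char *nv12, int width) {
  SDL_UpdateTexture(texture, nullptr, nv12, width);
  SDL_RenderClear(renderer);
  // A null destination rect stretches the frame to the current window size,
  // which also covers the resizable-window case handled above.
  SDL_RenderCopy(renderer, texture, nullptr, nullptr);
  SDL_RenderPresent(renderer);
}

The texture itself is created once, as in the diff: SDL_CreateTexture(renderer, SDL_PIXELFORMAT_NV12, SDL_TEXTUREACCESS_STREAMING, 1280, 720).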

@@ -127,24 +127,24 @@ int ScreenCaptureWgc::Stop() {
void ScreenCaptureWgc::OnFrame(const WgcSession::wgc_session_frame &frame) {
  std::cout << "onframe" << std::endl;
  AVFrame *av_frame = av_frame_alloc();
  // AVFrame *av_frame = av_frame_alloc();
  av_frame->pts = av_gettime_relative();
  av_frame->pkt_dts = av_frame->pts;
  // av_frame->pkt_pts = av_frame->pts;
  // av_frame->pts = av_gettime_relative();
  // av_frame->pkt_dts = av_frame->pts;
  // // av_frame->pkt_pts = av_frame->pts;
  av_frame->width = frame.width;
  av_frame->height = frame.height;
  av_frame->format = AV_PIX_FMT_BGRA;
  av_frame->pict_type = AV_PICTURE_TYPE_NONE;
  av_frame->pkt_size = frame.width * frame.height * 4;
  // av_frame->width = frame.width;
  // av_frame->height = frame.height;
  // av_frame->format = AV_PIX_FMT_BGRA;
  // av_frame->pict_type = AV_PICTURE_TYPE_NONE;
  // av_frame->pkt_size = frame.width * frame.height * 4;
  av_image_fill_arrays(av_frame->data, av_frame->linesize, frame.data,
                       AV_PIX_FMT_BGRA, frame.width, frame.height, 1);
  // av_image_fill_arrays(av_frame->data, av_frame->linesize, frame.data,
  //                      AV_PIX_FMT_BGRA, frame.width, frame.height, 1);
  if (_on_data)
    _on_data((unsigned char *)av_frame->data, av_frame->pkt_size, frame.width,
             frame.height);
    _on_data((unsigned char *)frame.data, frame.width * frame.height * 4,
             frame.width, frame.height);
  // av_frame_free(&av_frame);
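
On the capture side, the commit stops wrapping the WGC frame in an AVFrame and hands the BGRA bytes straight to the registered callback, with the size computed as width * height * 4 (four bytes per BGRA pixel). A hedged sketch of that hand-off; the callback type is assumed here, the real declaration lives elsewhere in the repository:

#include <cstdint>
#include <functional>

// Assumed shape of the frame-data callback; the project's actual type may differ.
using OnDataCallback =
    std::function<void(unsigned char *data, int size, int width, int height)>;

void ForwardBgraFrame(const OnDataCallback &on_data, uint8_t *bgra, int width,
                      int height) {
  if (!on_data) return;
  // Packed BGRA: 4 bytes per pixel, no extra row padding assumed.
  on_data(bgra, width * height * 4, width, height);
}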