Encoding frames to video with ffmpeg

Question

I am trying to encode a video in Unreal Engine 4 with C++. I have access to the individual frames. Below is the code which reads the viewport's displayed pixels and stores them in a buffer.

//Safely get render target resource.
FRenderTarget* RenderTarget = TextureRenderTarget->GameThread_GetRenderTargetResource();
FIntPoint Size = RenderTarget->GetSizeXY();
auto ImageBytes = Size.X * Size.Y * static_cast<int32>(sizeof(FColor));
TArray<uint8> RawData;
RawData.AddUninitialized(ImageBytes);

//Get image raw data.
if (!RenderTarget->ReadPixelsPtr((FColor*)RawData.GetData()))
{
    RawData.Empty();
    UE_LOG(ExportRenderTargetBPFLibrary, Error, TEXT("ExportRenderTargetAsImage: Failed to get raw data."));
    return false;
}

Buffer::getInstance().add(RawData);

Unreal Engine has IImageWrapperModule, with which you can get an image from a frame, but nothing for video encoding. What I want is to encode frames in real time for a live-streaming service.

I found this post Encoding a screenshot into a video using FFMPEG which is kind of what I want, but I have problems adapting this solution for my case. The code is outdated (for example avcodec_encode_video changed to avcodec_encode_video2 with different parameters).

Below is the code of the encoder.

void Compressor::DoWork()
{
AVCodec* codec;
AVCodecContext* c = NULL;
//uint8_t* outbuf;
//int /*i, out_size,*/ outbuf_size;

UE_LOG(LogTemp, Warning, TEXT("encoding"));

codec = avcodec_find_encoder(AV_CODEC_ID_MPEG1VIDEO);            // finding the H264 encoder
if (!codec) {
    UE_LOG(LogTemp, Warning, TEXT("codec not found"));
    exit(1);
}
else UE_LOG(LogTemp, Warning, TEXT("codec found"));

c = avcodec_alloc_context3(codec);
c->bit_rate = 400000;
c->width = 1280;                                        // resolution must be a multiple of two (1280x720),(1920x1080),(720x480)
c->height = 720;
c->time_base.num = 1;                                   // framerate numerator
c->time_base.den = 25;                                  // framerate denominator
c->gop_size = 10;                                       // emit one intra frame every ten frames
c->max_b_frames = 1;                                    // maximum number of b-frames between non b-frames
c->keyint_min = 1;                                      // minimum GOP size
c->i_quant_factor = (float)0.71;                        // qscale factor between P and I frames
//c->b_frame_strategy = 20;                               ///// find out exactly what this does
c->qcompress = (float)0.6;                              ///// find out exactly what this does
c->qmin = 20;                                           // minimum quantizer
c->qmax = 51;                                           // maximum quantizer
c->max_qdiff = 4;                                       // maximum quantizer difference between frames
c->refs = 4;                                            // number of reference frames
c->trellis = 1;                                         // trellis RD Quantization
c->pix_fmt = AV_PIX_FMT_YUV420P;                           // universal pixel format for video encoding
c->codec_id = AV_CODEC_ID_MPEG1VIDEO;
c->codec_type = AVMEDIA_TYPE_VIDEO;

if (avcodec_open2(c, codec, NULL) < 0) {
    UE_LOG(LogTemp, Warning, TEXT("could not open codec"));         // opening the codec
    //exit(1);
}
else UE_LOG(LogTemp, Warning, TEXT("codec opened"));

FString FinalFilename = FString("C:/Screen/sample.mpg");
auto &PlatformFile = FPlatformFileManager::Get().GetPlatformFile();
auto FileHandle = PlatformFile.OpenWrite(*FinalFilename, true);

if (FileHandle)
{
    delete FileHandle; // remove when ready
    UE_LOG(LogTemp, Warning, TEXT("file opened"));
    while (true)
    {
        UE_LOG(LogTemp, Warning, TEXT("removing from buffer"));

        int nbytes = avpicture_get_size(AV_PIX_FMT_YUV420P, c->width, c->height);                                      // allocating outbuffer
        uint8_t* outbuffer = (uint8_t*)av_malloc(nbytes * sizeof(uint8_t));

        AVFrame* inpic = av_frame_alloc();
        AVFrame* outpic = av_frame_alloc();

        outpic->pts = (int64_t)((float)1 * (1000.0 / ((float)(c->time_base.den))) * 90);                              // setting frame pts
        avpicture_fill((AVPicture*)inpic, (uint8_t*)Buffer::getInstance().remove().GetData(),
            AV_PIX_FMT_PAL8, c->width, c->height); // fill image with input screenshot
        avpicture_fill((AVPicture*)outpic, outbuffer, AV_PIX_FMT_YUV420P, c->width, c->height);                        // clear output picture for buffer copy
        av_image_alloc(outpic->data, outpic->linesize, c->width, c->height, c->pix_fmt, 1);

        /* 
        inpic->data[0] += inpic->linesize[0]*(screenHeight-1);                                                      
        // flipping frame
        inpic->linesize[0] = -inpic->linesize[0];                                                                   
        // flipping frame

        struct SwsContext* fooContext = sws_getContext(screenWidth, screenHeight, PIX_FMT_RGB32, c->width, c->height, PIX_FMT_YUV420P, SWS_FAST_BILINEAR, NULL, NULL, NULL);
        sws_scale(fooContext, inpic->data, inpic->linesize, 0, c->height, outpic->data, outpic->linesize);          // converting frame size and format

        out_size = avcodec_encode_video(c, outbuf, outbuf_size, outpic);                                            
        // save in file

        */

    }
    delete FileHandle;
}
else
{
    UE_LOG(LogTemp, Warning, TEXT("Can't open file"));
}
}

Can someone explain the frame-flipping part (why is it done?) and how to use the avcodec_encode_video2 function instead of avcodec_encode_video?

Solution

Not only is avcodec_encode_video outdated, avcodec_encode_video2 has also been tagged as deprecated for a while. You should now use the new avcodec_send_frame and avcodec_receive_packet API for encoding.
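
For reference, the new API is a symmetric send/receive pair. Here is a minimal sketch of the loop (error handling trimmed). Note the inner loop: one sent frame can yield zero or several packets, and sending a null frame at the end drains the encoder:

    // Minimal sketch of the FFmpeg 3.1+ encode loop.
    // ctx is an opened AVCodecContext; pass frame == nullptr to flush.
    int Encode(AVCodecContext* ctx, AVFrame* frame)
    {
        int ret = avcodec_send_frame(ctx, frame);
        if (ret < 0)
            return ret;                              // could not queue the frame

        for (;;)
        {
            AVPacket pkt;
            av_init_packet(&pkt);
            pkt.data = nullptr;
            pkt.size = 0;

            ret = avcodec_receive_packet(ctx, &pkt);
            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
                return 0;                            // needs more input / fully drained
            if (ret < 0)
                return ret;                          // real encoding error

            // mux the packet here, e.g. av_interleaved_write_frame(fmt_ctx, &pkt);
            av_packet_unref(&pkt);
        }
    }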

The "flipping" part doesn't do any good for encoding, and I strongly suggest don't do that in your code. If you find the output size is not right, just switch swscale interpolation algorithm flag to SWS_ACCURATE_RND.

Besides the old avcodec_encode_video API, there are several potential risks:

  • To use an H264 encoder, find it with AV_CODEC_ID_H264, not AV_CODEC_ID_MPEG1VIDEO; the ffmpeg libs also need to be built with libx264.
    • Or, if you have a working nvidia card with nvenc support, avcodec_find_encoder_by_name("h264_nvenc") will be much better (see the selection sketch after this list).
  • delete FileHandle is executed twice.
  • The avpicture... functions have been deprecated for a long time. Use the av_image_* equivalents (e.g. av_image_fill_arrays, av_image_get_buffer_size) instead.
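
A small helper along these lines covers both encoder-selection bullets (FindH264Encoder is a made-up name; it assumes your FFmpeg build ships at least one of the two encoders):

    // Hypothetical helper: prefer the NVENC hardware encoder when present,
    // fall back to software libx264 otherwise.
    AVCodec* FindH264Encoder()
    {
        if (AVCodec* nvenc = avcodec_find_encoder_by_name("h264_nvenc"))
            return nvenc;                               // hardware path
        return avcodec_find_encoder(AV_CODEC_ID_H264);  // software path (libx264)
    }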

And if performance is critical, move the whole encoding process to an independent thread instead of the game thread.
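
A minimal sketch of that hand-off, using a plain std::thread and a mutex-guarded queue (all names here are illustrative, not part of the class below): the game thread only copies pixels and pushes, the worker does the sws_scale/encode work.

    #include <condition_variable>
    #include <cstdint>
    #include <mutex>
    #include <queue>
    #include <thread>
    #include <vector>

    // Illustrative frame queue: the game thread pushes raw RGBA buffers,
    // a worker thread pops them and runs the FFmpeg scale/encode/mux calls.
    class FrameEncoderThread
    {
    public:
        void Start() { Worker = std::thread([this] { Run(); }); }

        // Called on the game thread, e.g. after reading the viewport pixels.
        void Push(std::vector<uint8_t> Frame)
        {
            {
                std::lock_guard<std::mutex> Lock(Mutex);
                Queue.push(std::move(Frame));
            }
            Cond.notify_one();
        }

        // Signals the worker to drain the queue and exit, then joins it.
        void Stop()
        {
            {
                std::lock_guard<std::mutex> Lock(Mutex);
                Done = true;
            }
            Cond.notify_one();
            Worker.join();
        }

    private:
        void Run()
        {
            for (;;)
            {
                std::unique_lock<std::mutex> Lock(Mutex);
                Cond.wait(Lock, [this] { return Done || !Queue.empty(); });
                if (Queue.empty())          // Done was set and the queue is drained
                    return;
                std::vector<uint8_t> Frame = std::move(Queue.front());
                Queue.pop();
                Lock.unlock();
                // sws_scale + avcodec_send_frame/avcodec_receive_packet on Frame here.
            }
        }

        std::thread Worker;
        std::mutex Mutex;
        std::condition_variable Cond;
        std::queue<std::vector<uint8_t>> Queue;
        bool Done = false;                  // guarded by Mutex
    };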

I have some code for encoding the UE4 viewport output in my custom GameViewportClient class, which is similar to the official ffmpeg muxing and encode_video examples.

MyGameViewportClient.h:

UCLASS(Config=Game)
class FUSIONCUT_API UMyGameViewportClient : public UGameViewportClient
{
    GENERATED_BODY()

public:
    virtual void Draw(FViewport* Viewport, FCanvas* SceneCanvas) override;

    void FirstTimeInit();

    void InitCodec();

    void TidyUp();

    void SetAutoRecording(bool val);
    void RecordNextFrame();
    bool CanRecordNextFrame();
    void SetRecording(bool val);
    void SetLevelDelay(int32 delay);

    void SetOver(bool val);
    void SetAbandon(bool val);
    void SetFilePath(FString out_file);
    void SetThumbnail(FString thumbnail_file, int32 thumbnail_frame);
    void SaveThumbnailImage();

private:
    UPROPERTY(Config)
    FString DeviceNum;

    UPROPERTY(Config)
    FString H264Crf;

    UPROPERTY(Config)
    int DeviceIndex;

    UPROPERTY()
    UFunction* ProgressFunc;

    UPROPERTY()
    UFunction* FinishFunc;

    FIntPoint ViewportSize;
    int count;

    TArray<FColor> ColorBuffer;
    TArray<uint8> IMG_Buffer;

    struct OutputStream {
        AVStream* Stream;
        AVCodecContext* Ctx;

        int64_t NextPts;

        AVFrame* Frame;

        struct SwsContext* SwsCtx;
    };

    OutputStream VideoSt = { 0 };
    AVOutputFormat* Fmt;
    AVFormatContext* FmtCtx;
    AVCodec* VideoCodec;
    AVDictionary* Opt = nullptr;
    SwsContext* SwsCtx;
    AVPacket Pkt;

    int GotOutput;
    int InLineSize[1];

    bool Start;
    bool Over;
    bool FirstTime;
    bool Abandon;
    bool AutoRecording;
    bool RecordingNextFrame;
    double LastSendingTime;
    std::string FilePath;
    FString UEFilePath;
    int32 LevelDelay;

    void EncodeAndWrite();

    void CaptureFrame();
    void AddStream(enum AVCodecID CodecID);
    void OpenVideo();
    int WriteFrame(bool need_save_thumbnail = true);
    void CloseStream();
    void AllocPicture();

    int FFmpegEncode(AVFrame *frame);
};

MyGameViewportClient.cpp:

void UMyGameViewportClient::InitCodec()
{
    ViewportSize = Viewport->GetSizeXY();

    av_register_all();
    avformat_alloc_output_context2(&FmtCtx, nullptr, nullptr, FilePath.c_str());
    if (!FmtCtx)
    {
        UE_LOG(LogTemp, Error, TEXT("cannot alloc format context"));
        return;
    }
    Fmt = FmtCtx->oformat;

    //auto codec_id = AV_CODEC_ID_H264;
    const char codec_name[32] = "h264_nvenc";
    //auto codec = avcodec_find_encoder(codec_id);
    auto codec = avcodec_find_encoder_by_name(codec_name);

    av_format_set_video_codec(FmtCtx, codec);

    if (Fmt->video_codec != AV_CODEC_ID_NONE)
    {
        AddStream(Fmt->video_codec);
    }
    OpenVideo();
    VideoSt.NextPts = 0;
    av_dump_format(FmtCtx, 0, FilePath.c_str(), 1);

    if (!(Fmt->flags & AVFMT_NOFILE))
    {
        auto ret = avio_open(&FmtCtx->pb, FilePath.c_str(), AVIO_FLAG_WRITE);
        if (ret < 0)
        {
            auto errstr = FString(av_err2str(ret));
            UE_LOG(LogTemp, Error, TEXT("Could not open %s: %s"), *UEFilePath, *errstr);
            return;
        }
    }

    auto ret = avformat_write_header(FmtCtx, &Opt);
    if (ret < 0)
    {
        UE_LOG(LogTemp, Error, TEXT("Error occurred when writing header to: %s"), *UEFilePath);
        return;
    }

    InLineSize[0] = 4 * VideoSt.Ctx->width;
    SwsCtx = sws_getContext(VideoSt.Ctx->width, VideoSt.Ctx->height, AV_PIX_FMT_RGBA,
                            VideoSt.Ctx->width, VideoSt.Ctx->height, VideoSt.Ctx->pix_fmt,
                            0, nullptr, nullptr, nullptr);
}

void UMyGameViewportClient::OpenVideo()
{
    auto c = VideoSt.Ctx;
    AVDictionary* opt = nullptr;

    av_dict_copy(&opt, Opt, 0);

    auto ret = avcodec_open2(c, VideoCodec, &opt);
    av_dict_free(&opt);
    if (ret < 0)
    {
        auto errstr = FString(av_err2str(ret));
        UE_LOG(LogTemp, Error, TEXT("Could not open video codec: %s"), *errstr);
    }

    AllocPicture();
    if (!VideoSt.Frame)
    {
        UE_LOG(LogTemp, Error, TEXT("Could not allocate video frame"));
        return;
    }
    if (avcodec_parameters_from_context(VideoSt.Stream->codecpar, c))
    {
        UE_LOG(LogTemp, Error, TEXT("Could not copy the stream parameters"));
    }
}

void UMyGameViewportClient::AllocPicture()
{
    VideoSt.Frame = av_frame_alloc();
    if (!VideoSt.Frame)
    {
        UE_LOG(LogTemp, Error, TEXT("av_frame_alloc failed."));
        return;
    }

    VideoSt.Frame->format = VideoSt.Ctx->pix_fmt;
    VideoSt.Frame->width = ViewportSize.X;
    VideoSt.Frame->height = ViewportSize.Y;

    if (av_frame_get_buffer(VideoSt.Frame, 32) < 0)
    {
        UE_LOG(LogTemp, Error, TEXT("Could not allocate frame data"));
    }
}

void UMyGameViewportClient::AddStream(enum AVCodecID CodecID)
{
    VideoCodec = avcodec_find_encoder(CodecID);
    if (!VideoCodec)
    {
        UE_LOG(LogTemp, Error, TEXT("Could not find encoder for '%s'"), ANSI_TO_TCHAR(avcodec_get_name(CodecID)));
    }


    VideoSt.Stream = avformat_new_stream(FmtCtx, nullptr);
    if (!VideoSt.Stream)
    {
        UE_LOG(LogTemp, Error, TEXT("Could not allocate stream"));
    }

    VideoSt.Stream->id = FmtCtx->nb_streams - 1;
    VideoSt.Ctx = avcodec_alloc_context3(VideoCodec);
    if (!VideoSt.Ctx)
    {
        UE_LOG(LogTemp, Error, TEXT("Could not alloc an encoding context"));
    }

    VideoSt.Ctx->codec_id = CodecID;
    VideoSt.Ctx->width = ViewportSize.X;
    VideoSt.Ctx->height = ViewportSize.Y;
    VideoSt.Stream->time_base = VideoSt.Ctx->time_base = { 1, FRAMERATE };  // FRAMERATE is a user-defined constant (e.g. 30), not an FFmpeg macro
    VideoSt.Ctx->gop_size = 10;
    VideoSt.Ctx->max_b_frames = 1;
    VideoSt.Ctx->pix_fmt = AV_PIX_FMT_YUV420P;

    av_opt_set(VideoSt.Ctx->priv_data, "cq", TCHAR_TO_ANSI(*H264Crf), 0);  // change `cq` to `crf` if using libx264
    av_opt_set(VideoSt.Ctx->priv_data, "gpu", TCHAR_TO_ANSI(*DeviceNum), 0); // comment this line if using libx264

    if (FmtCtx->oformat->flags & AVFMT_GLOBALHEADER)
        VideoSt.Ctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
}

void UMyGameViewportClient::EncodeAndWrite()
{
    Pkt = { nullptr };
    av_init_packet(&Pkt);

    fflush(stdout);

    IMG_Buffer.SetNum(ColorBuffer.Num() * 4);
    uint8* DestPtr = nullptr;
    for (auto i = 0; i < ColorBuffer.Num(); i++)
    {
        DestPtr = &IMG_Buffer[i * 4];
        auto SrcPtr = ColorBuffer[i];
        *DestPtr++ = SrcPtr.R;
        *DestPtr++ = SrcPtr.G;
        *DestPtr++ = SrcPtr.B;
        *DestPtr++ = SrcPtr.A;
    }

    uint8* inData[1] = { IMG_Buffer.GetData() };
    sws_scale(SwsCtx, inData, InLineSize, 0, VideoSt.Ctx->height, VideoSt.Frame->data, VideoSt.Frame->linesize);

    VideoSt.Frame->pts = VideoSt.NextPts++;
    if (FFmpegEncode(VideoSt.Frame) < 0)
        UE_LOG(LogTemp, Error, TEXT("Error encoding frame %d"), count);

    auto ret = WriteFrame();
    if (ret < 0)
    {
        auto errstr = FString(av_err2str(ret));
        UE_LOG(LogTemp, Error, TEXT("Error while writing video frame: %s"), *errstr);
    }
    av_packet_unref(&Pkt);
}

int UMyGameViewportClient::WriteFrame(bool need_save_thumbnail)  // signature fixed to match the declaration; the parameter is unused in this excerpt
{
    av_packet_rescale_ts(&Pkt, VideoSt.Ctx->time_base, VideoSt.Stream->time_base);
    Pkt.stream_index = VideoSt.Stream->index;
    return av_interleaved_write_frame(FmtCtx, &Pkt);
}

int UMyGameViewportClient::FFmpegEncode(AVFrame *frame) {
    GotOutput = 0;
    auto ret = avcodec_send_frame(VideoSt.Ctx, frame);
    if (ret < 0 && ret != AVERROR_EOF) {
        auto errstr = FString(av_err2str(ret));
        UE_LOG(LogTemp, Warning, TEXT("error during sending frame, error : %s"), *errstr);
        return -1;
    }

    ret = avcodec_receive_packet(VideoSt.Ctx, &Pkt);
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
        return 0;

    if (ret < 0)
    {
        auto errstr = FString(av_make_error_string(ret).c_str());
        UE_LOG(LogTemp, Error, TEXT("Error during receiving frame, error : %s"), *errstr);
        av_packet_unref(&Pkt);
        return -1;
    }

    GotOutput = 1;
    return 0;
}

void UMyGameViewportClient::CloseStream()
{
    avcodec_free_context(&VideoSt.Ctx);
    av_frame_free(&VideoSt.Frame);
    sws_freeContext(SwsCtx);

    if (!(Fmt->flags & AVFMT_NOFILE))
    {
        auto ret = avio_closep(&FmtCtx->pb);
        if (ret < 0)
        {
            auto errstr = FString(av_err2str(ret));
            UE_LOG(LogTemp, Error, TEXT("avio close failed: %s"), *errstr);
        }
    }

    avformat_free_context(FmtCtx);
}

void UMyGameViewportClient::TidyUp()
{
    /* get the delayed frames */
    for (GotOutput = 1; GotOutput; count++)
    {
        fflush(stdout);

        FFmpegEncode(nullptr);

        if (GotOutput)
        {
            auto ret = WriteFrame(false);
            if (ret < 0)
            {
                auto errstr = FString(av_err2str(ret));
                UE_LOG(LogTemp, Error, TEXT("Error while writing video frame: %s"), *errstr);
            }
            av_packet_unref(&Pkt);
        }
    }

    auto ret = av_write_trailer(FmtCtx);
    if (ret < 0)
    {
        auto errstr = FString(av_err2str(ret));
        UE_LOG(LogTemp, Error, TEXT("writing trailer error: %s"), *errstr);
    }

    CloseStream();
}

void UMyGameViewportClient::Draw(FViewport* Viewport, FCanvas* SceneCanvas)
{
    Super::Draw(Viewport, SceneCanvas);
    if (Over)  // You may need to set this in other class
    {
        Over = false;
        TidyUp();
    }

    else {
        CaptureFrame();
    }
}

void UMyGameViewportClient::CaptureFrame()
{
    if (!Viewport) {
        UE_LOG(LogTemp, Error, TEXT("No viewport"));
        return;
    }

    if (ViewportSize.X == 0 || ViewportSize.Y == 0) {
        UE_LOG(LogTemp, Error, TEXT("Viewport size is 0"));
        return;
    }

    ColorBuffer.Empty();

    if (!Viewport->ReadPixels(ColorBuffer, FReadSurfaceDataFlags(),
                              FIntRect(0, 0, ViewportSize.X, ViewportSize.Y)))
    {
        UE_LOG(LogTemp, Error, TEXT("Cannot read from viewport"));
        return;
    }

    EncodeAndWrite();  // call InitCodec() before this
}
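
For completeness, a hypothetical calling order reconstructed from the code above (nothing here is wired up by the engine for you; the file path is made up):

    // Somewhere after the viewport has a valid size:
    ViewportClient->SetFilePath(TEXT("C:/Videos/out.mp4"));   // hypothetical path
    ViewportClient->InitCodec();
    // ... the engine then ticks Draw() -> CaptureFrame() -> EncodeAndWrite() per frame ...
    ViewportClient->SetOver(true);   // the next Draw() calls TidyUp(): flush, write trailer, close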
