6 将AVFrame保存成图片
/*
 * Convert piFrame to a new frame with the given size and pixel format.
 * When done with the returned AVFrame, the caller must free it with
 * av_frame_free(). Returns NULL on any failure; piFrame itself is
 * never modified or freed.
 */
AVFrame *ConvertVideoFrameTo(AVFrame *piFrame,uint32_t nWidth, uint32_t nHeight,enum AVPixelFormat sPixFmt)
{
    if(NULL == piFrame)
        return NULL;
    VideoFrameBaseParam sSrcParam = CreateVideoFrameBaseParamByAVFrame(piFrame);
    VideoFrameBaseParam sDstParam = CreateVideoFrameBaseParam(nWidth,nHeight,sPixFmt);
    // Fill any zero/unset destination fields from the source parameters
    // (presumably this is what allows callers to pass 0,0 for "keep size" — TODO confirm).
    sDstParam = GetRealVideoParamByFirst(sSrcParam,sDstParam);
    VideoFrameScaler *piScale = CreateVideoFrameScaler(sSrcParam,sDstParam);
    if(NULL == piScale)
        return NULL;
    AVFrame *piDstFrame = CreateVideoFrameAndBuffered(sDstParam);
    // Fix: the original passed a possibly-NULL destination frame to ScaleVideo.
    if(NULL == piDstFrame)
    {
        FreeVideoFrameScaler(&piScale);
        return NULL;
    }
    int nRet = ScaleVideo(piScale,piFrame,piDstFrame);
    FreeVideoFrameScaler(&piScale);
    if(nRet <= 0)
    {
        av_frame_free(&piDstFrame);
        return NULL;
    }
    return piDstFrame;
}
/*
 * Encode piFrame into an image file at strFilePath.
 *
 * This function does NOT free piFrame; the caller keeps ownership.
 *
 * Supported (tested): *.jpg *.jpeg *.png *.tiff
 * Other formats are untested; they may work.
 *
 * Returns 0 on success, a negative error code otherwise.
 */
int SaveAVFrameToImageFile(AVFrame *piFrame, const std::string &strFilePath)
{
    if(NULL == piFrame || strFilePath.empty())
        return -1;
    // The encoder path below expects YUV420P input: convert once, then recurse.
    if(piFrame->format != AV_PIX_FMT_YUV420P)
    {
        AVFrame *piYUV420Frame = ConvertVideoFrameTo(piFrame,0,0,AV_PIX_FMT_YUV420P);
        // Fix: bail out explicitly when the conversion fails instead of
        // recursing with a NULL frame.
        if(NULL == piYUV420Frame)
            return -1;
        int nRet = SaveAVFrameToImageFile(piYUV420Frame,strFilePath);
        av_frame_free(&piYUV420Frame);
        return nRet;
    }
    OutputFileHandler *piOutput = OutputFileHandler::CreateOutputFileHandler(strFilePath,NULL);
    if(nullptr == piOutput)
        return -2;
    OutputStreamHandler *piOutStream = OutputStreamHandler::CreateOutputStreamHandlerToFile(piOutput);
    // Fix: the original dereferenced piOutStream/piEncoder without checking
    // for creation failure, crashing instead of returning an error.
    if(nullptr == piOutStream)
    {
        delete piOutput;
        return -3;
    }
    // The encoder is configured from the frame's geometry; YUVJ420P is the
    // full-range variant the JPEG encoder expects.
    EncoderSetting fnSetting = [piFrame](AVCodecContext *piCodecCxt){
        piCodecCxt->width = piFrame->width;
        piCodecCxt->height = piFrame->height;
        piCodecCxt->pix_fmt = AV_PIX_FMT_YUVJ420P;
        piCodecCxt->time_base = {1, 25};
    };
    EncodeHandler *piEncoder = EncodeHandler::CreateEncodeHandler(piOutput->GetAVFormatContext()->oformat->video_codec,
                                                                 piOutput->GetAVFormatContext(),NULL,fnSetting);
    if(nullptr == piEncoder)
    {
        delete piOutStream;
        delete piOutput;
        return -4;
    }
    // false: the wrapper does NOT take ownership of piFrame (caller frees it).
    FrameListWrapperHandler *piFrameListHandler = new FrameListWrapperHandler(false);
    piFrameListHandler->AddFrame(piFrame);
    piFrameListHandler->AddFinished();  // exactly one frame, then EOF
    // Wire the pipeline: frame list -> encoder -> output stream -> file.
    piOutStream->AddSourceReadPoint(piEncoder->GetOutputReadPoints()[0]);
    piEncoder->AddSourceReadPoint(piFrameListHandler->GetOutputReadPoints()[0]);
    piOutput->StartSaveToFile();
    delete piOutput;
    delete piOutStream;
    delete piEncoder;
    delete piFrameListHandler;
    return 0;
}
从这里可以看出,保存图片和保存成其他常见格式没有什么不同,仅在细节上有些差异。
这里用到了 FrameListWrapperHandler 这个类型,它的作用是将AVFrame封装为一个Handler,功能上和 VideoSimulateHandler 类似,只不过这里的AVFrame都是手动加入的。这样就可以和其他Handler相互配合使用了。
注意:构造 FrameListWrapperHandler 时,有一个参数,表示是否将AVFrame的所有权传递给 FrameListWrapperHandler ,如果为true,FrameListWrapperHandler 负责使用后释放对应的AVFrame,如果为false,则由用户负责释放传入的AVFrame。
注意:当调用 AddFrame() 时,FrameListWrapperHandler 并不负责检测 AVFrame中的格式,这点要注意。
注意:当没有调用 AddFinished()时,表示还有未处理的数据,此时即使 frame list wrapper 中已经没有了数据,返回的是AE_READ_AGAIN。而调用了AddFinished(),就表示没有更多的数据了,当 frame list wrapper 中已经加入的Frame处理完后,会返回AE_READ_EOF。
7 将图片读取为AVFrame
/*
 * Decode the first video frame of an image file into a freshly allocated
 * AVFrame. The caller must free the returned frame with av_frame_free().
 * Returns NULL on failure.
 */
AVFrame *ReadImageFileToAVFrame(const std::string &strFileName)
{
    InputFileHandler *inputFile = InputFileHandler::CreateInputFileHandlerByFile(strFileName);
    if(nullptr == inputFile)
        return NULL;
    InputStreamHandler *inputstream = InputStreamHandler::CreateInputStreamHandlerFromFileByMediaType(inputFile,AVMEDIA_TYPE_VIDEO);
    DecodeHandler *piDecoder = DecodeHandler::CreateDecodeHandlerFromAVStream(inputstream->GetAVStream(),NULL);
    piDecoder->AddSourceReadPoint(inputstream->GetOutputReadPoints()[0]);
    ReadPoint *piPoint = piDecoder->GetOutputReadPoints()[0];
    AVFrame *piDstFrame = NULL;
    AVIOHandlerError nErr;
    while(1)
    {
        AVFrame *piFrame = NULL;
        AVPacket *piPacket = NULL;
        nErr = piPoint->ReadData(piFrame,piPacket);
        if(nErr == AE_READ_AGAIN)
            continue;
        else if(nErr != AE_NONE)
            break;  // error or EOF before the first frame arrived
        // Copy the decoded frame into our own buffer so it outlives the
        // decoder pipeline torn down below.
        VideoFrameBaseParam sParam = CreateVideoFrameBaseParam(piFrame->width,piFrame->height,(enum AVPixelFormat)piFrame->format);
        piDstFrame = CreateVideoFrameAndBuffered(sParam);
        av_frame_copy(piDstFrame,piFrame);
        break;
    }
    // Fix: the original leaked piDecoder/inputstream/inputFile whenever the
    // loop exited on the error path; tear down on every path now.
    delete piDecoder;
    delete inputstream;
    delete inputFile;
    return piDstFrame;
}
这是上一个示例的相反的操作。
有了上一个示例和这一个示例,读取gif,生成gif的功能也就很简单了,照猫画虎即可。
8 使用ReadPoint
void SaveVideoFrameToJPGFile()
{
VideoSimulateHandler *piVideoSimulator = new VideoSimulateHandler(640,480);
piVideoSimulator->SetFunctionForEOF([](int nIndex,AVFrame *piFrame){return nIndex > 5;});
ReadPoint *piPoint = piVideoSimulator->GetOutputReadPoints()[0];
AVIOHandlerError nErr;
int index = 0;
while(1)
{
AVFrame *piFrame;
AVPacket *piPacket;
nErr = piPoint->ReadData(piFrame,piPacket);
if(nErr == AE_READ_AGAIN)
continue;
else if(nErr != AE_NONE)
break;
index++;
std::string str = "frame" + std::to_string(index) + ".jpg";
SaveAVFrameToImageFile(piFrame,str);
}
delete piVideoSimulator;
}
这个示例,就是展示一下如何使用ReadPoint,Handler使用了ReadPoint,我们也可以直接使用ReadPoint。
9 Transport
/*
 * Classic transcode: demux 03.mp4, decode its audio and video streams,
 * resample/scale/filter to the encoder parameters, then encode and mux
 * the result into sio.avi.
 */
void Transport()
{
    OutputFileHandler *piOutput = OutputFileHandler::CreateOutputFileHandler("sio.avi");
    if(piOutput)
    {
        InputFileHandler *inputFile = InputFileHandler::CreateInputFileHandlerByFile("03.mp4");
        InputStreamHandler *inputAudioStream = InputStreamHandler::CreateInputStreamHandlerFromFileByMediaType(inputFile,AVMEDIA_TYPE_AUDIO);
        InputStreamHandler *inputVideoStream = InputStreamHandler::CreateInputStreamHandlerFromFileByMediaType(inputFile,AVMEDIA_TYPE_VIDEO);
        DecodeHandler *piAudioDecoder = DecodeHandler::CreateDecodeHandlerFromAVStream(inputAudioStream->GetAVStream(),NULL);
        DecodeHandler *piVideoDecoder = DecodeHandler::CreateDecodeHandlerFromAVStream(inputVideoStream->GetAVStream(),NULL);
        EncodeHandler *piAudioEncoder = EncodeHandler::CreateEncodeHandler(piOutput->GetAVFormatContext()->oformat->audio_codec,
                                                                          piOutput->GetAVFormatContext(),NULL,NULL);
        // Fix: the callback parameter is an AVCodecContext, so name it
        // piCodecCxt — the original "piFmtCtx" wrongly suggested an AVFormatContext.
        EncoderSetting setting = [=](AVCodecContext *piCodecCxt){
            piCodecCxt->framerate = {40,1};
            piCodecCxt->time_base = {1,40};
            piCodecCxt->bit_rate = 5*1024*1024;
            piCodecCxt->width = 640;
            piCodecCxt->height = 480;
        };
        EncodeHandler *piVideoEncoder = EncodeHandler::CreateEncodeHandler(piOutput->GetAVFormatContext()->oformat->video_codec,
                                                                          piOutput->GetAVFormatContext(),NULL,setting);
        AudioResampleHandler *piResampler = new AudioResampleHandler(piAudioEncoder->GetAudioFrameBaseParam());
        VideoScaleHandler *piScaler = new VideoScaleHandler(piVideoEncoder->GetVideoFrameBaseParam());
        OutputStreamHandler *piAudioOutStream = OutputStreamHandler::CreateOutputStreamHandlerToFile(piOutput);
        OutputStreamHandler *piVideoOutStream = OutputStreamHandler::CreateOutputStreamHandlerToFile(piOutput);
        // The fps filter converts the decoder's frame rate to the encoder's.
        FilterGraphHandler *piFilter = new FilterGraphHandler;
        piFilter->CreateFilterGraph();
        piFilter->AddVideoBufferSource("in",CreateBufferSrcParametersByDecoder(piVideoDecoder->GetCodecContext(),
                                                                              inputVideoStream->GetAVStream()->time_base));
        piFilter->AddVideoBufferSink("out");
        std::stringstream ss;
        ss << "[in]fps=fps=" << av_q2d(piVideoEncoder->GetAVCodecContext()->framerate) << "[out]";
        std::cout << ss.str() << std::endl;
        std::cout << piFilter->GraphParseAndConfig(ss.str()) << std::endl;
        // Wire the pipelines (downstream first):
        //   audio: input stream -> decoder -> resampler -> encoder -> out stream
        //   video: input stream -> decoder -> filter -> scaler -> encoder -> out stream
        piAudioOutStream->AddSourceReadPoint(piAudioEncoder->GetOutputReadPoints()[0]);
        piAudioEncoder->AddSourceReadPoint(piResampler->GetOutputReadPoints()[0]);
        piResampler->AddSourceReadPoint(piAudioDecoder->GetOutputReadPoints()[0]);
        piAudioDecoder->AddSourceReadPoint(inputAudioStream->GetOutputReadPoints()[0]);
        piVideoOutStream->AddSourceReadPoint(piVideoEncoder->GetOutputReadPoints()[0]);
        piVideoEncoder->AddSourceReadPoint(piScaler->GetOutputReadPoints()[0]);
        piScaler->AddSourceReadPoint(piFilter->GetOutputReadPoints()[0]);
        piFilter->AddSourceReadPoint(piVideoDecoder->GetOutputReadPoints()[0]);
        piVideoDecoder->AddSourceReadPoint(inputVideoStream->GetOutputReadPoints()[0]);
        // Name every handler so log output is readable (logging disabled here).
        AVIOHandler::SetLogSwitch(false);
        piOutput->SetHandlerName("piOutput");
        piAudioOutStream->SetHandlerName("piAudioOutStream");
        piAudioEncoder->SetHandlerName("piAudioEncoder");
        piResampler->SetHandlerName("piResampler");
        piAudioDecoder->SetHandlerName("piAudioDecoder");
        inputAudioStream->SetHandlerName("inputAudioStream");
        piVideoOutStream->SetHandlerName("piVideoOutStream");
        piVideoEncoder->SetHandlerName("piVideoEncoder");
        piScaler->SetHandlerName("piScaler");
        piVideoDecoder->SetHandlerName("piVideoDecoder");
        inputVideoStream->SetHandlerName("inputVideoStream");
        inputFile->SetHandlerName("inputFile");
        piFilter->SetHandlerName("filter");
        piOutput->StartSaveToFile();
        delete piOutput;
        delete piAudioOutStream;
        delete piVideoOutStream;
        delete piAudioEncoder;
        delete piAudioDecoder;
        delete piVideoEncoder;
        delete piVideoDecoder;
        delete piResampler;
        delete piScaler;
        delete inputAudioStream;
        delete inputVideoStream;
        delete inputFile;
    }
}
ffmpeg最经典,最常见的操作就是转码文件。
10 裁剪与拼接
/*
 * Cut a segment out of each of two inputs (03.mp4, 01.mp4), then concatenate
 * the two segments — audio and video separately — into concat.mp4.
 */
void Concat()
{
    OutputFileHandler *piOutput = OutputFileHandler::CreateOutputFileHandler("concat.mp4");
    if(piOutput)
    {
        OutputStreamHandler *piOutVideoStream = OutputStreamHandler::CreateOutputStreamHandlerToFile(piOutput);
        OutputStreamHandler *piOutAudioStream = OutputStreamHandler::CreateOutputStreamHandlerToFile(piOutput);
        EncoderSetting fnVideoSetting = [=](AVCodecContext *piCodecCxt){
            piCodecCxt->width = 800;
            piCodecCxt->height = 600;
            piCodecCxt->framerate = {40,1};
            piCodecCxt->time_base = {1,40};
            piCodecCxt->bit_rate = 3 * 1024 * 1024;
        };
        EncodeHandler *piVideoEncoder = EncodeHandler::CreateEncodeHandler(piOutput->GetAVFormatContext()->oformat->video_codec,
                                                                          piOutput->GetAVFormatContext(),NULL,fnVideoSetting);
        EncodeHandler *piAudioEncoder = EncodeHandler::CreateEncodeHandler(piOutput->GetAVFormatContext()->oformat->audio_codec,
                                                                          piOutput->GetAVFormatContext(),NULL,NULL);
        // Cut 1: 03.mp4, range 60s..100s (SetRange takes milliseconds).
        InputFileCutterHandler *piCut1 = InputFileCutterHandler::CreateInputFileCutterHandlerByFile("03.mp4");
        ReadPoint *piVideoPoint1 = piCut1->AddCutterByAVMediaType(AVMEDIA_TYPE_VIDEO,"in1v");
        ReadPoint *piAudioPoint1 = piCut1->AddCutterByAVMediaType(AVMEDIA_TYPE_AUDIO,"in1a");
        piCut1->SetRange(60*1000,100*1000);
        // Cut 2: 01.mp4, range 140s..200s.
        InputFileCutterHandler *piCut2 = InputFileCutterHandler::CreateInputFileCutterHandlerByFile("01.mp4");
        ReadPoint *piVideoPoint2 = piCut2->AddCutterByAVMediaType(AVMEDIA_TYPE_VIDEO,"in2v");
        ReadPoint *piAudioPoint2 = piCut2->AddCutterByAVMediaType(AVMEDIA_TYPE_AUDIO,"in2a");
        piCut2->SetRange(140*1000,200*1000);
        // One fps filter per input so both segments match the encoder's rate.
        FilterGraphHandler *piFilter1 = new FilterGraphHandler;
        piFilter1->CreateFilterGraph();
        piFilter1->AddVideoBufferSource("in",CreateBufferSrcParametersByDecoder(piCut1->GetDecoderHandlerByReadPoint(piVideoPoint1)->GetCodecContext(),
                                                                               piCut1->GetInputStreamHandlerByReadPoint(piVideoPoint1)->GetAVStream()->time_base));
        piFilter1->AddVideoBufferSink("out");
        std::stringstream ss;
        ss << "[in]fps=fps=" << av_q2d(piVideoEncoder->GetAVCodecContext()->framerate) << "[out]";
        piFilter1->GraphParseAndConfig(ss.str());
        FilterGraphHandler *piFilter2 = new FilterGraphHandler;
        piFilter2->CreateFilterGraph();
        piFilter2->AddVideoBufferSource("in",CreateBufferSrcParametersByDecoder(piCut2->GetDecoderHandlerByReadPoint(piVideoPoint2)->GetCodecContext(),
                                                                               piCut2->GetInputStreamHandlerByReadPoint(piVideoPoint2)->GetAVStream()->time_base));
        piFilter2->AddVideoBufferSink("out");
        piFilter2->GraphParseAndConfig(ss.str());  // same description for both graphs
        VideoScaleHandler *piScaler1 = new VideoScaleHandler(piVideoEncoder->GetVideoFrameBaseParam());
        AudioResampleHandler *piResample1 = new AudioResampleHandler(piAudioEncoder->GetAudioFrameBaseParam());
        VideoScaleHandler *piScaler2 = new VideoScaleHandler(piVideoEncoder->GetVideoFrameBaseParam());
        AudioResampleHandler *piResample2 = new AudioResampleHandler(piAudioEncoder->GetAudioFrameBaseParam());
        FrameConcatHandler *piVideoConcat = new FrameConcatHandler;
        FrameConcatHandler *piAudioConcat = new FrameConcatHandler;
        // Wiring: each cut feeds filter->scaler (video) / resampler (audio),
        // the concat handlers splice segment 1 then segment 2, encoders last.
        piOutVideoStream->AddSourceReadPoint(piVideoEncoder->GetOutputReadPoints()[0]);
        piOutAudioStream->AddSourceReadPoint(piAudioEncoder->GetOutputReadPoints()[0]);
        piVideoEncoder->AddSourceReadPoint(piVideoConcat->GetOutputReadPoints()[0]);
        piAudioEncoder->AddSourceReadPoint(piAudioConcat->GetOutputReadPoints()[0]);
        piVideoConcat->AddSourceReadPoint(piScaler1->GetOutputReadPoints()[0]);
        piVideoConcat->AddSourceReadPoint(piScaler2->GetOutputReadPoints()[0]);
        piAudioConcat->AddSourceReadPoint(piResample1->GetOutputReadPoints()[0]);
        piAudioConcat->AddSourceReadPoint(piResample2->GetOutputReadPoints()[0]);
        piScaler1->AddSourceReadPoint(piFilter1->GetOutputReadPoints()[0]);
        piFilter1->AddSourceReadPoint(piVideoPoint1);
        piResample1->AddSourceReadPoint(piAudioPoint1);
        piScaler2->AddSourceReadPoint(piFilter2->GetOutputReadPoints()[0]);
        piFilter2->AddSourceReadPoint(piVideoPoint2);
        piResample2->AddSourceReadPoint(piAudioPoint2);
        AVIOHandler::SetLogSwitch(true);
        piOutput->SetHandlerName("output");
        piOutAudioStream->SetHandlerName("output_audio_stream");
        piOutVideoStream->SetHandlerName("output_video_stream");
        piVideoEncoder->SetHandlerName("video_encoder");
        // Fix: the original named piVideoEncoder twice ("video_encoder" then
        // "audio_encoder") and never named the audio encoder.
        piAudioEncoder->SetHandlerName("audio_encoder");
        piVideoConcat->SetHandlerName("videoconcat");
        piAudioConcat->SetHandlerName("audioconcat");
        piFilter1->SetHandlerName("filter1");
        piFilter2->SetHandlerName("filter2");
        piOutput->StartSaveToFile();
        delete piOutput;
        delete piOutVideoStream;
        delete piOutAudioStream;
        delete piVideoEncoder;
        delete piAudioEncoder;
        delete piVideoConcat;
        delete piAudioConcat;
        delete piScaler1;
        delete piResample1;
        delete piScaler2;
        delete piResample2;
        delete piFilter1;
        delete piFilter2;
        delete piCut1;
        delete piCut2;
    }
}
这个示例,从两个输入中各自裁剪一部分,然后将裁剪出来的两块拼接成一个文件。