Trimming a video to a specified time range with FFmpeg

FFmpeg provides the following command line for cutting out a specified time range from a video:
ffmpeg -i c:\record\out_computer.mp4 -ss 00:00:10 -to 00:00:39 out_computer_2second.mp4

I stepped through the ffmpeg command-line code: it does not use a filter. It reads the video AVPackets directly and uses their pts to decide whether each one falls inside the requested time range.
If the pts that is read is smaller than the start time, the next frame is read and checked in the same way.
If the pts is greater than the end time, reading stops.
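
A minimal sketch of that pts check (my own illustration, not code taken from ffmpeg), assuming the requested range is given in seconds and the video is in stream 0; the helper name PacketInRange is hypothetical:

extern "C"
{
#include "libavformat/avformat.h"
#include "libavutil/mathematics.h"
}

// Hypothetical helper: rescale the requested range (in seconds) into the
// stream's time_base and compare it against the packet's pts.
// Assumes st->start_time and pkt->pts are valid (not AV_NOPTS_VALUE).
static bool PacketInRange(const AVPacket *pkt, const AVStream *st,
	int iStartSec, int iDurationSec)
{
	const AVRational secondBase = { 1, 1 };
	int64_t i64Start = av_rescale_q(iStartSec, secondBase, st->time_base);
	int64_t i64End = av_rescale_q(iStartSec + iDurationSec, secondBase, st->time_base);
	int64_t i64Pts = pkt->pts - st->start_time;	// pts relative to the stream's start
	return i64Pts >= i64Start && i64Pts <= i64End;
}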

I wrote a small project specifically for this; it handles files that contain only video, with no audio.
The main function is shown below and is easy to follow:

int main()
{
	CTrimFile cVideoTrim;
	const char *pFileA = "c:\\record\\out-computer.mp4";

	const char *pFileOut = "c:\\record\\out-computer_trimmed.mp4";

	std::string strBeginTime = "00:00:10";
	std::string strEndTime = "00:00:20";


	int iStartTime = TimeToSeconds(strBeginTime);
	int iEndTime = TimeToSeconds(strEndTime);

	int iDuration = iEndTime - iStartTime;


	cVideoTrim.StartTrim(pFileA, pFileOut, iStartTime, iDuration);
	cVideoTrim.WaitFinish();
	return 0;
}

Below is part of the code that reads the file; in the original post, a red box in a screenshot highlighted the frame-filtering logic (screenshot omitted).
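Since that screenshot is not reproduced here, the check it highlighted is excerpted below from CTrimFile::VideoARead() (the full listing appears further down):

			// Skip packets whose pts (relative to the stream's start_time) is
			// before the requested start; stop once the pts passes the end of
			// the range.
			if (packet.pts - m_i64VideoStartTime < m_i64TrimStartTime)
			{
				continue;
			}

			if (packet.pts - m_i64VideoStartTime > m_i64TrimStartTime + m_i64TrimDuration)
			{
				break;
			}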

The code structure is as follows (screenshot omitted): the project consists of FfmpegTrimTest.cpp plus the CTrimFile class in TrimFile.h / TrimFile.cpp.
The contents of FfmpegTrimTest.cpp are as follows:

#include <iostream>
#include "TrimFile.h"
#include <vector>

#ifdef	__cplusplus
extern "C"
{
#endif

#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avutil.lib")
#pragma comment(lib, "avdevice.lib")
#pragma comment(lib, "avfilter.lib")
#pragma comment(lib, "postproc.lib")
#pragma comment(lib, "swresample.lib")
#pragma comment(lib, "swscale.lib")


#ifdef __cplusplus
};
#endif


void SeparateString(const char* pStr, std::vector<int>& vecResult, char chKey)
{
	if (pStr == NULL || pStr[0] == 0)
	{
		return;
	}

	std::string tmp = pStr;
	if (tmp.at(tmp.length() - 1) != chKey)
	{
		tmp += chKey;
	}
	size_t beginIndex = 0;
	static const size_t npos = size_t(-1);
	size_t indexCh1a = tmp.find(chKey, beginIndex);

	std::string tmpBuf;

	while (indexCh1a != npos)
	{
		int len = static_cast<int>(indexCh1a - beginIndex);
		if (len > 0)
		{
			tmpBuf.clear();
			tmpBuf.append(tmp.c_str() + beginIndex, static_cast<size_t>(len));
			vecResult.push_back(atoi(tmpBuf.c_str()));
		}
		beginIndex = ++indexCh1a;
		indexCh1a = tmp.find(chKey, beginIndex);
	}
}


/// Convert a time string of the form 00:00:10 into seconds
int TimeToSeconds(std::string strTime)
{
	int iRetSecond = 0;
	std::vector<int> vecTime;
	SeparateString(strTime.c_str(), vecTime, ':');

	if (vecTime.size() != 3)
	{
		return iRetSecond;
	}

	int iHour = vecTime[0];
	int iMinute = vecTime[1];
	int iSecond = vecTime[2];

	iRetSecond = iHour * 3600 + iMinute * 60 + iSecond;

	return iRetSecond;
}
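// Example: TimeToSeconds("01:02:03") == 1 * 3600 + 2 * 60 + 3 == 3723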


int main()
{
	CTrimFile cVideoTrim;
	const char *pFileA = "c:\\record\\out-computer.mp4";

	const char *pFileOut = "c:\\record\\out-computer_trimmed.mp4";

	std::string strBeginTime = "00:00:10";
	std::string strEndTime = "00:00:20";


	int iStartTime = TimeToSeconds(strBeginTime);
	int iEndTime = TimeToSeconds(strEndTime);

	int iDuration = iEndTime - iStartTime;


	cVideoTrim.StartTrim(pFileA, pFileOut, iStartTime, iDuration);
	cVideoTrim.WaitFinish();
	return 0;
}



The contents of TrimFile.h are as follows:

#pragma once

#include <Windows.h>

#ifdef	__cplusplus
extern "C"
{
#endif
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libswresample/swresample.h"
#include "libavdevice/avdevice.h"
#include "libavutil/audio_fifo.h"
#include "libavutil/avutil.h"
#include "libavutil/fifo.h"
#include "libavutil/frame.h"
#include "libavutil/imgutils.h"

#include "libavfilter/avfilter.h"
#include "libavfilter/buffersink.h"
#include "libavfilter/buffersrc.h"


#ifdef __cplusplus
};
#endif

class CTrimFile
{
public:
	CTrimFile();
	~CTrimFile();
public:
	int StartTrim(const char *pFileA, const char *pFileOut, int iStartTime, int iDuration);
	int WaitFinish();
private:
	int OpenFileA(const char *pFileA);
	int OpenOutPut(const char *pFileOut);
private:
	static DWORD WINAPI VideoAReadProc(LPVOID lpParam);
	void VideoARead();


	static DWORD WINAPI VideoTrimProc(LPVOID lpParam);
	void VideoTrim();
private:
	AVFormatContext *m_pFormatCtx_FileA = NULL;

	AVCodecContext *m_pReadCodecCtx_VideoA = NULL;
	AVCodec *m_pReadCodec_VideoA = NULL;


	AVCodecContext	*m_pCodecEncodeCtx_Video = NULL;
	AVFormatContext *m_pFormatCtx_Out = NULL;

	AVFifoBuffer *m_pVideoAFifo = NULL;


	int m_iVideoWidth = 1920;
	int m_iVideoHeight = 1080;
	int m_iYuv420FrameSize = 0;
private:
	CRITICAL_SECTION m_csVideoASection;
	HANDLE m_hVideoAReadThread = NULL;
	HANDLE m_hVideoTrimThread = NULL;

	int64_t m_i64TrimStartTime = 0;
	int64_t m_i64TrimDuration = 0;

	int64_t m_i64VideoStartTime = 0;
	AVRational m_streamTimeBase;
};




The contents of TrimFile.cpp are as follows:


#include "TrimFile.h"
//#include "log/log.h"





CTrimFile::CTrimFile()
{
	InitializeCriticalSection(&m_csVideoASection);
}

CTrimFile::~CTrimFile()
{
	DeleteCriticalSection(&m_csVideoASection);
}

int CTrimFile::StartTrim(const char *pFileA, const char *pFileOut, int iStartTime, int iDuration)
{
	int ret = -1;
	do
	{
		ret = OpenFileA(pFileA);
		if (ret != 0)
		{
			break;
		}

		// Convert the requested start time and duration from seconds into the
		// input stream's time_base (pts) units: seconds * den / num.
		m_i64TrimStartTime = (int64_t)iStartTime * m_streamTimeBase.den / m_streamTimeBase.num;
		m_i64TrimDuration = (int64_t)iDuration * m_streamTimeBase.den / m_streamTimeBase.num;

		ret = OpenOutPut(pFileOut);
		if (ret != 0)
		{
			break;
		}

		// Leftover from an earlier filter-based attempt: the two filter-graph
		// descriptions built below are never used -- the actual trimming is
		// done by comparing packet pts values in VideoARead().
		char szFilterDesc[512] = { 0 };

		_snprintf(szFilterDesc, sizeof(szFilterDesc),
			"[inPad]pad=%d:%d[inPad0];[in0]trim=start=%d:duration=%d[in00];[inPad0][in00]overlay=0[out]",
			m_iVideoWidth, m_iVideoHeight, iStartTime, iDuration);

		_snprintf(szFilterDesc, sizeof(szFilterDesc),
			"[inPad]pad=%d:%d[inPad0];[in0]trim=start=%d:end=%d[in00];[inPad0][in00]overlay=0[out]",
			m_iVideoWidth, m_iVideoHeight, iStartTime, iStartTime + iDuration);

		m_iYuv420FrameSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, m_pReadCodecCtx_VideoA->width, m_pReadCodecCtx_VideoA->height, 1);
		// Allocate a FIFO with room for 30 YUV420P frames
		m_pVideoAFifo = av_fifo_alloc(30 * m_iYuv420FrameSize);

		m_hVideoAReadThread = CreateThread(NULL, 0, VideoAReadProc, this, 0, NULL);

		m_hVideoTrimThread = CreateThread(NULL, 0, VideoTrimProc, this, 0, NULL);

	} while (0);

	return ret;
}

int CTrimFile::WaitFinish()
{
	int ret = 0;
	do
	{
		if (NULL == m_hVideoAReadThread)
		{
			break;
		}
		WaitForSingleObject(m_hVideoAReadThread, INFINITE);

		CloseHandle(m_hVideoAReadThread);
		m_hVideoAReadThread = NULL;

		WaitForSingleObject(m_hVideoTrimThread, INFINITE);
		CloseHandle(m_hVideoTrimThread);
		m_hVideoTrimThread = NULL;
	} while (0);

	return ret;
}

int CTrimFile::OpenFileA(const char *pFileA)
{
	int ret = -1;

	do
	{
		if ((ret = avformat_open_input(&m_pFormatCtx_FileA, pFileA, 0, 0)) < 0) {
			printf("Could not open input file.");
			break;
		}
		if ((ret = avformat_find_stream_info(m_pFormatCtx_FileA, 0)) < 0) {
			printf("Failed to retrieve input stream information");
			break;
		}

		if (m_pFormatCtx_FileA->streams[0]->codecpar->codec_type != AVMEDIA_TYPE_VIDEO)
		{
			break;
		}

		// Remember the stream's start_time and time_base; the trim range in
		// seconds is later converted into these pts units.
		m_i64VideoStartTime = m_pFormatCtx_FileA->streams[0]->start_time;
		m_streamTimeBase = m_pFormatCtx_FileA->streams[0]->time_base;

		m_pReadCodec_VideoA = (AVCodec *)avcodec_find_decoder(m_pFormatCtx_FileA->streams[0]->codecpar->codec_id);

		m_pReadCodecCtx_VideoA = avcodec_alloc_context3(m_pReadCodec_VideoA);

		if (m_pReadCodecCtx_VideoA == NULL)
		{
			break;
		}
		avcodec_parameters_to_context(m_pReadCodecCtx_VideoA, m_pFormatCtx_FileA->streams[0]->codecpar);

		m_iVideoWidth = m_pReadCodecCtx_VideoA->width;
		m_iVideoHeight = m_pReadCodecCtx_VideoA->height;

		m_pReadCodecCtx_VideoA->framerate = m_pFormatCtx_FileA->streams[0]->r_frame_rate;

		if (avcodec_open2(m_pReadCodecCtx_VideoA, m_pReadCodec_VideoA, NULL) < 0)
		{
			break;
		}

		ret = 0;
	} while (0);


	return ret;
}


int CTrimFile::OpenOutPut(const char *pFileOut)
{
	int iRet = -1;

	AVStream *pAudioStream = NULL;
	AVStream *pVideoStream = NULL;

	do
	{
		avformat_alloc_output_context2(&m_pFormatCtx_Out, NULL, NULL, pFileOut);

		{
			AVCodec* pCodecEncode_Video = (AVCodec *)avcodec_find_encoder(m_pFormatCtx_Out->oformat->video_codec);

			m_pCodecEncodeCtx_Video = avcodec_alloc_context3(pCodecEncode_Video);
			if (!m_pCodecEncodeCtx_Video)
			{
				break;
			}

			pVideoStream = avformat_new_stream(m_pFormatCtx_Out, pCodecEncode_Video);
			if (!pVideoStream)
			{
				break;
			}

			int frameRate = 10;
			m_pCodecEncodeCtx_Video->flags |= AV_CODEC_FLAG_QSCALE;
			m_pCodecEncodeCtx_Video->bit_rate = 4000000;
			m_pCodecEncodeCtx_Video->rc_min_rate = 4000000;
			m_pCodecEncodeCtx_Video->rc_max_rate = 4000000;
			m_pCodecEncodeCtx_Video->bit_rate_tolerance = 4000000;
			m_pCodecEncodeCtx_Video->time_base.den = frameRate;
			m_pCodecEncodeCtx_Video->time_base.num = 1;

			m_pCodecEncodeCtx_Video->width = m_iVideoWidth;
			m_pCodecEncodeCtx_Video->height = m_iVideoHeight;
			//pH264Encoder->pCodecCtx->frame_number = 1;
			m_pCodecEncodeCtx_Video->gop_size = 12;
			m_pCodecEncodeCtx_Video->max_b_frames = 0;
			m_pCodecEncodeCtx_Video->thread_count = 4;
			m_pCodecEncodeCtx_Video->pix_fmt = AV_PIX_FMT_YUV420P;
			m_pCodecEncodeCtx_Video->codec_id = AV_CODEC_ID_H264;
			m_pCodecEncodeCtx_Video->codec_type = AVMEDIA_TYPE_VIDEO;

			av_opt_set(m_pCodecEncodeCtx_Video->priv_data, "b-pyramid", "none", 0);
			av_opt_set(m_pCodecEncodeCtx_Video->priv_data, "preset", "superfast", 0);
			av_opt_set(m_pCodecEncodeCtx_Video->priv_data, "tune", "zerolatency", 0);

			if (m_pFormatCtx_Out->oformat->flags & AVFMT_GLOBALHEADER)
				m_pCodecEncodeCtx_Video->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;

			if (avcodec_open2(m_pCodecEncodeCtx_Video, pCodecEncode_Video, 0) < 0)
			{
				// Failed to open the encoder; abort
				break;
			}
		}

		if (!(m_pFormatCtx_Out->oformat->flags & AVFMT_NOFILE))
		{
			if (avio_open(&m_pFormatCtx_Out->pb, pFileOut, AVIO_FLAG_WRITE) < 0)
			{
				break;
			}
		}

		avcodec_parameters_from_context(pVideoStream->codecpar, m_pCodecEncodeCtx_Video);

		if (avformat_write_header(m_pFormatCtx_Out, NULL) < 0)
		{
			break;
		}

		iRet = 0;
	} while (0);


	if (iRet != 0)
	{
		if (m_pCodecEncodeCtx_Video != NULL)
		{
			avcodec_free_context(&m_pCodecEncodeCtx_Video);
			m_pCodecEncodeCtx_Video = NULL;
		}

		if (m_pFormatCtx_Out != NULL)
		{
			avformat_free_context(m_pFormatCtx_Out);
			m_pFormatCtx_Out = NULL;
		}
	}

	return iRet;
}


DWORD WINAPI CTrimFile::VideoAReadProc(LPVOID lpParam)
{
	CTrimFile *pVideoMerge = (CTrimFile *)lpParam;
	if (pVideoMerge != NULL)
	{
		pVideoMerge->VideoARead();
	}
	return 0;
}

void CTrimFile::VideoARead()
{
	AVFrame *pFrame;
	pFrame = av_frame_alloc();

	int y_size = m_pReadCodecCtx_VideoA->width * m_pReadCodecCtx_VideoA->height;

	char *pY = new char[y_size];
	char *pU = new char[y_size / 4];
	char *pV = new char[y_size / 4];

	AVPacket packet = { 0 };
	int ret = 0;
	while (1)
	{
		av_packet_unref(&packet);

		ret = av_read_frame(m_pFormatCtx_FileA, &packet);
		if (ret == AVERROR(EAGAIN))
		{
			continue;
		}
		else if (ret == AVERROR_EOF)
		{
			break;
		}
		else if (ret < 0)
		{
			break;
		}

		ret = avcodec_send_packet(m_pReadCodecCtx_VideoA, &packet);

		if (ret >= 0)
		{
			ret = avcodec_receive_frame(m_pReadCodecCtx_VideoA, pFrame);
			if (ret == AVERROR(EAGAIN))
			{
				continue;
			}
			else if (ret == AVERROR_EOF)
			{
				break;
			}
			else if (ret < 0) {
				break;
			}

			// Frame filtering: pts values are compared relative to the
			// stream's start_time. Packets before the requested start are
			// skipped; once the pts passes the end of the range, stop reading.
			if (packet.pts - m_i64VideoStartTime < m_i64TrimStartTime)
			{
				continue;
			}

			if (packet.pts - m_i64VideoStartTime > m_i64TrimStartTime + m_i64TrimDuration)
			{
				break;
			}

			while (1)
			{
				if (av_fifo_space(m_pVideoAFifo) >= m_iYuv420FrameSize)
				{
					///Y
					int contY = 0;
					for (int i = 0; i < pFrame->height; i++)
					{
						memcpy(pY + contY, pFrame->data[0] + i * pFrame->linesize[0], pFrame->width);
						contY += pFrame->width;
					}


					///U
					int contU = 0;
					for (int i = 0; i < pFrame->height / 2; i++)
					{
						memcpy(pU + contU, pFrame->data[1] + i * pFrame->linesize[1], pFrame->width / 2);
						contU += pFrame->width / 2;
					}


					///V
					int contV = 0;
					for (int i = 0; i < pFrame->height / 2; i++)
					{
						memcpy(pV + contV, pFrame->data[2] + i * pFrame->linesize[2], pFrame->width / 2);
						contV += pFrame->width / 2;
					}


					EnterCriticalSection(&m_csVideoASection);
					av_fifo_generic_write(m_pVideoAFifo, pY, y_size, NULL);
					av_fifo_generic_write(m_pVideoAFifo, pU, y_size / 4, NULL);
					av_fifo_generic_write(m_pVideoAFifo, pV, y_size / 4, NULL);
					LeaveCriticalSection(&m_csVideoASection);

					break;
				}
				else
				{
					Sleep(100);
				}
			}

		}


		if (ret == AVERROR(EAGAIN))
		{
			continue;
		}
	}

	av_frame_free(&pFrame);
	delete[] pY;
	delete[] pU;
	delete[] pV;
}

DWORD WINAPI CTrimFile::VideoTrimProc(LPVOID lpParam)
{
	CTrimFile *pVideoMerge = (CTrimFile *)lpParam;
	if (pVideoMerge != NULL)
	{
		pVideoMerge->VideoTrim();
	}
	return 0;
}


void CTrimFile::VideoTrim()
{
	int ret = 0;

	int iYuv420PadFrameSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, m_iVideoWidth, m_iVideoHeight, 1);
	int y_size = m_iVideoWidth * m_iVideoHeight;


	DWORD dwBeginTime = ::GetTickCount();


	AVFrame *pFrameVideoA = av_frame_alloc();
	uint8_t *videoA_buffer_yuv420 = (uint8_t *)av_malloc(m_iYuv420FrameSize);
	av_image_fill_arrays(pFrameVideoA->data, pFrameVideoA->linesize, videoA_buffer_yuv420, AV_PIX_FMT_YUV420P, m_pReadCodecCtx_VideoA->width, m_pReadCodecCtx_VideoA->height, 1);

	pFrameVideoA->width = m_iVideoWidth;
	pFrameVideoA->height = m_iVideoHeight;
	pFrameVideoA->format = AV_PIX_FMT_YUV420P;

	int iOutVideoWidth = m_pReadCodecCtx_VideoA->width;
	int iOutVideoHeight = m_pReadCodecCtx_VideoA->height;

	AVPacket packet = { 0 };
	int iPicCount = 0;

	while (1)
	{
		if (NULL == m_pVideoAFifo)
		{
			break;
		}

		int iVideoASize = av_fifo_size(m_pVideoAFifo);

		if (iVideoASize >= m_iYuv420FrameSize)
		{
			EnterCriticalSection(&m_csVideoASection);
			av_fifo_generic_read(m_pVideoAFifo, videoA_buffer_yuv420, m_iYuv420FrameSize, NULL);
			LeaveCriticalSection(&m_csVideoASection);


			// Stamp the frame with a monotonically increasing pts, rescaled from
			// the encoder time_base (1/frameRate) to the output stream's time_base.
			pFrameVideoA->pkt_dts = pFrameVideoA->pts = av_rescale_q_rnd(iPicCount, m_pCodecEncodeCtx_Video->time_base, m_pFormatCtx_Out->streams[0]->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
			pFrameVideoA->pkt_duration = 0;
			pFrameVideoA->pkt_pos = -1;


			ret = avcodec_send_frame(m_pCodecEncodeCtx_Video, pFrameVideoA);

			// Only mux when the encoder actually produced a packet
			// (avcodec_receive_packet can return AVERROR(EAGAIN)).
			ret = avcodec_receive_packet(m_pCodecEncodeCtx_Video, &packet);
			if (ret == 0)
			{
				av_write_frame(m_pFormatCtx_Out, &packet);
				av_packet_unref(&packet);
			}

			iPicCount++;
		}
		else
		{
			if (m_hVideoAReadThread == NULL)
			{
				break;
			}
			Sleep(1);
		}
	}

	av_write_trailer(m_pFormatCtx_Out);
	avio_close(m_pFormatCtx_Out->pb);

	av_frame_free(&pFrameVideoA);
}
