一、概述
最近正在处理android上的mp4切割问题。学习了很多mp4的知识,mp4文件按照编码类型,分为mpeg-4,avc这两种;这两种类型的mp4在后面的处理中会有不同的地方。
在android系统下,MP4的分割大致有三种可选择的方法,(1)FFmpeg框架,对视频进行一帧一帧解码编码处理。(2)第三方java开源库,比如优秀的mp4parser。(3)Android4.1中新加入的api:MediaCodec。
以上三种方法都有自己的优劣势,简单介绍:
(1)FFmpeg,需要做jni实现。编码解码可以裁剪任意长度视频的任意中间长度区间,支持对视频加入水印,配乐,滤镜处理;支持裁剪屏幕尺寸。最致命的缺点是:
效率低得难以忍受,大概是1s的视频需要1.5s的处理时间,如果Android应用中这样处理,让用户等待完全是找死行为。
(2)mp4Parser。有非常详细的资料和demo可以学习,是一个大牛的个人作品。但是它对视频的裁剪处理非常粗糙,只能从关键帧进行裁剪。经过我测试:mpeg-4格式的mp4文件,大概2S-10S左右会有一个关键帧;avc格式的mp4文件大概0.3S有一个关键帧。
很大的优点是运行效率非常高,可以忽略时间成本。
但是这样会引入很明显的问题,如果你的业务需求需要准确截取,无论你是选择上一个节点还是下一个,头尾大概都会引入平均3-4S的长度误差。这个误差有时难以忍受。
(3)MediaCodec是Android4.1才引入的。如果依赖这个Lib进行操作,会导致4.1之前的版本无法使用。另外,需要说明,这个api依赖很多C层和硬件层的实现,不要试图将所有代码分离出来加到自己的项目中。
下篇给出一个可行的方案来解决android上的mp4切割问题。
二、实现
这节谈一下如何在android上实现mp4文件的高效率切割。
业务需求举例:把一段2分钟的mp4文件切割出00:42 至 01:16这段时间的视频,要求足够短的执行时间和尽量少的误差。
分析:mp4Parser只能在关键帧切割,比如,在00:40和00:45分别存在一个可切割关键帧,那么切割视频的头和尾,都应该选择短切割。然后获取到误差的视频段,如果这个误差大于0.5S,用FFmpeg进行一帧一帧编解码切割文件。这样最多会有三段mp4文件,再次将这三段mp4拼接起来就可以了。
下面直接上关键代码,这些代码在PC上新建一个java工程也可以实现。
1.切割文件方法:
/**
 * Trims src to the span [startMs, endMs] using mp4parser (requires
 * isoviewer-1.0-RC-27) and writes the result to dst. Because decoding can
 * only begin at a sync (key) frame, both cut points are snapped to sync
 * samples first; the actually-used times are returned so the caller can
 * measure the error against the requested times.
 *
 * @param src     source mp4 file
 * @param dst     destination mp4 file (created if it does not exist)
 * @param startMs requested start position, in milliseconds
 * @param endMs   requested end position, in milliseconds
 * @return {startSeconds, endSeconds} actually used after sync-frame correction
 * @throws IOException if reading src or writing dst fails
 */
public static double[] startTrim(File src, File dst, int startMs, int endMs) throws IOException {
    Movie movie = MovieCreator.build(src.getAbsolutePath());
    List<Track> tracks = movie.getTracks();
    movie.setTracks(new LinkedList<Track>());
    // Floating-point division: the original integer division (startMs/1000)
    // silently discarded the sub-second part of both cut points.
    double startTime = startMs / 1000d;
    double endTime = endMs / 1000d;
    boolean timeCorrected = false;
    // Here we try to find a track that has sync samples. Since we can only
    // start decoding at such a sample we SHOULD make sure that the start of
    // the new fragment is exactly such a frame.
    for (Track track : tracks) {
        if (track.getSyncSamples() != null && track.getSyncSamples().length > 0) {
            if (timeCorrected) {
                throw new RuntimeException("The startTime has already been corrected by another track with SyncSample. Not Supported.");
            }
            // Per the article: (true,false) snaps inward ("short" cut);
            // (false,true) would snap outward ("long" cut).
            startTime = correctTimeToSyncSample(track, startTime, true);
            endTime = correctTimeToSyncSample(track, endTime, false);
            timeCorrected = true;
        }
    }
    for (Track track : tracks) {
        long currentSample = 0;
        double currentTime = 0;
        long startSample = -1;
        long endSample = -1;
        for (int i = 0; i < track.getDecodingTimeEntries().size(); i++) {
            TimeToSampleBox.Entry entry = track.getDecodingTimeEntries().get(i);
            // entry.getCount() samples share the same delta (duration).
            for (int j = 0; j < entry.getCount(); j++) {
                if (currentTime <= startTime) {
                    // current sample is still before the new start time
                    startSample = currentSample;
                }
                if (currentTime <= endTime) {
                    // current sample is before the new end time
                    endSample = currentSample;
                } else {
                    // current sample is after the end of the cropped video;
                    // remaining samples in this entry can be skipped
                    break;
                }
                // getDelta() is in track timescale units; convert to seconds.
                currentTime += (double) entry.getDelta() / (double) track.getTrackMetaData().getTimescale();
                currentSample++;
            }
        }
        // Crop EVERY track (video AND audio). The original code broke out of
        // this loop after the first track, silently dropping all others from dst.
        movie.addTrack(new CroppedTrack(track, startSample, endSample));
    }
    Container container = new DefaultMp4Builder().build(movie);
    if (!dst.exists()) {
        dst.createNewFile();
    }
    FileOutputStream fos = new FileOutputStream(dst);
    FileChannel fc = fos.getChannel();
    try {
        container.writeContainer(fc);
    } finally {
        // Close even when writeContainer throws — the original leaked both.
        fc.close();
        fos.close();
    }
    return new double[] { startTime, endTime };
}
2.ffmpeg切割方法,需要jni实现。稍后补充
/**
 * Frame-accurately re-encodes the span [mTimeStart, mTimeEnd] of the source
 * video (read from the field {@code mPath}) into {@code videoPath} using the
 * FFmpeg JNI wrappers (FFmpegGrabber/FFmpegRecorder).
 *
 * NOTE(review): {@code videoPath} is the OUTPUT file here; the input comes
 * from the field {@code mPath} — confirm mPath is set before calling.
 * NOTE(review): mTimeStart/mTimeEnd are compared against
 * {@code grabber.getTimestamp()}; their unit (ms vs us) is not visible from
 * this snippet — confirm against FFmpegGrabber.
 *
 * @param mTimeStart start of the span to re-encode
 * @param mTimeEnd   end of the span to re-encode
 * @param videoPath  path of the output mp4
 * @return videoPath on success, or null if any step failed
 */
public String getMp4ByFFmpeg(double mTimeStart, double mTimeEnd, String videoPath) {
    FFmpegRecorder recorder = null;
    FFmpegGrabber grabber = null;
    Frame grabbedFrame = null;
    try {
        String mFinalVideoPath = videoPath;
        int audioChannels = 2;
        recorder = new FFmpegRecorder(
                mFinalVideoPath, RecorderConfig.TARGET_VIDEO_WIDTH,
                RecorderConfig.TARGET_VIDEO_HEIGHT, audioChannels);
        RecorderConfig.setRecorderConfig(recorder, RecorderConfig.CONFIG_TYPE_MPEG4_HIGH);
        grabber = FFmpegGrabber.createDefault(mPath);
        grabber.setSquareSize(RecorderConfig.TARGET_VIDEO_WIDTH);
        // Preserve the source's rotation so the output is not sideways.
        int degree = VideoFileUtil.getRotate(mPath);
        grabber.setOrientation(degree);
        grabber.start();
        if (mTimeStart > 0) {
            // Seek to the start of the requested span before grabbing.
            grabber.setTimestamp((long) mTimeStart);
        }
        VideoClip mFinalClip = new VideoClip();
        mFinalClip.mIsFromLocal = true;
        mFinalClip.mHeight = RecorderConfig.TARGET_VIDEO_HEIGHT;
        mFinalClip.mWidth = RecorderConfig.TARGET_VIDEO_WIDTH;
        // Mirror the source's audio parameters on the recorder.
        recorder.setAudioChannels(grabber.getAudioChannels());
        recorder.setSampleRate(grabber.getSampleRate());
        recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
        recorder.setFrameRate(FFmpegRecorder.DEFAULT_FRAME_RATE);
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_MPEG4);
        recorder.start();
        mFinalClip.mOrientation = 0;
        mFinalClip.mFrameRate = (int) recorder.getFrameRate();
        mFinalClip.mSampleRate = recorder.getSampleRate();
        mFinalClip.mAudioBitrate = recorder.getAudioBitrate();
        mFinalClip.mAudioChannels = recorder.getAudioChannels();
        grabbedFrame = new Frame();
        // Audio and video streams end at different timestamps, so keep
        // grabbing until BOTH have passed mTimeEnd.
        boolean videoTimeout = false;
        boolean audioTimeout = false;
        while (grabber.grabFrame(grabbedFrame)) {
            long i = grabber.getTimestamp();
            long k = grabber.getFrameNumber();
            if (videoTimeout && audioTimeout) {
                break;
            }
            if (grabbedFrame.hasVideoFrame()) {
                int progress = 100 * (int) (i - mTimeStart) / mTotalTimeSpan;
                publishProgress(progress);
            }
            if (i > mTimeEnd) {
                if (grabbedFrame.hasAudioFrame()) {
                    audioTimeout = true;
                }
                if (grabbedFrame.hasVideoFrame()) {
                    videoTimeout = true;
                }
                continue;
            }
            // Rebase timestamps so the output starts at 0.
            grabbedFrame.setTimeStamp((long) (i - mTimeStart));
            recorder.recordFrameNoException(grabbedFrame);
            SLog.v(TAG, "record image at {}, #{}", i, k);
        }
        mFinalClip.mClipPath = mFinalVideoPath;
        mFinalClip.mDuration = (long) (MP4ParserUtil.getDuration(mFinalVideoPath) * 1000);
        mFinalClip.mTargetMills = mFinalClip.mDuration;
        return mFinalVideoPath;
    } catch (Exception ex) {
        // Don't swallow silently: the caller only sees null, so log the cause.
        SLog.v(TAG, "getMp4ByFFmpeg failed: {}", ex);
        return null;
    } finally {
        // Always release native allocations — the original leaked the frame,
        // grabber and recorder whenever any call above threw.
        if (grabbedFrame != null) {
            try { grabbedFrame.releaseNativeAllocation(); } catch (Exception ignored) { }
        }
        if (grabber != null) {
            try { grabber.stop(); } catch (Exception ignored) { }
            try { grabber.release(); } catch (Exception ignored) { }
        }
        if (recorder != null) {
            try { recorder.stop(); } catch (Exception ignored) { }
            try { recorder.release(); } catch (Exception ignored) { }
        }
    }
}
3.拼接三段视频代码
public boolean newClipMethod(String dstFile,String srcFile){
try {
double[] results = ClipMp4Util.startTrim(new File(dstFile),new File(srcFile),mTimeStart,mTimeEnd);
if(results == null){
return false;
}
Log.d("","newClipMethod-->results[0]-mTimeStart"+results[0]+" "+mTimeStart/1000);
Log.d("","newClipMethod-->mTimeEnd-results[1]"+mTimeEnd/1000+" "+results[1]);
//下面是短截取然后拼接的逻辑
if(results[0]-mTimeStart/1000>GAP){
String startMp4 = <span style="font-family: Arial, Helvetica, sans-serif;">getMp4ByFFmpeg(</span><span style="font-family: Arial, Helvetica, sans-serif;">mTimeStart,results[0]*1000,begin);</span>
}
if(mTimeEnd/1000-results[1]>GAP){
String endMp4 = <span style="font-family: Arial, Helvetica, sans-serif;">getMp4ByCode(</span><span style="font-family: Arial, Helvetica, sans-serif;">results[1]*吧1000,mTimeEnd,end);</span>
}
String[] videos = new String[3];
videos[0] = begin;
videos[1] = dst;
videos[2] = end;
appendVideo(videos);
} catch (Exception e) {
//如果不是同一格式的视频,这里合成会报错,直接返回中间视频.所以长视频选取长误差的方式,前后都多截取一段
Log.d("","new Method exception-->"+e);
e.printStackTrace();
}
return true;
}
原文:http://blog.csdn.net/banking17173/article/details/20646251