最近在做关于视频压缩剪切的模块,开始时是完全没有思路,太费劲了,没办法静下心来继续研究,终于有点小成果,在此做个记录,为了自己下次的使用方便,也为了能帮助到别人吧!
说一下需求: 我的需求是将一段视频压缩并缩放到指定分辨率(比如 320 × 480),基于 AVFoundation 框架实现
下面上代码:
storyBoard上直接拉拽UIButton,并关联事件
#import <AVFoundation/AVFoundation.h>
#import "ParseViewController.h"
// Class extension — private declarations for ParseViewController go here.
@interface ParseViewController ()
@end
@implementation ParseViewController
// View lifecycle — nothing beyond the superclass setup is needed here.
- (void)viewDidLoad {
    [super viewDidLoad];
}
// Forward memory warnings to UIViewController's default handling; this
// controller keeps no recreatable caches of its own.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}
#pragma mark - Actions
// Fixed: the original read `#param mark`, which is not a valid preprocessor
// directive — only `#pragma mark` produces the Xcode jump-bar section.

/// Storyboard button action: kicks off the demo export/compress pass.
/// @param sender The button wired up in the storyboard.
- (IBAction)parse:(UIButton *)sender {
    [self exportVideo];
}
#pragma mark - Methods
// Fixed: `#param mark` is not a valid directive; must be `#pragma mark`.

/// Loads a demo video from a hard-coded desktop path and exports a compressed
/// copy next to it, logging export progress from the completion block.
/// NOTE(review): absolute /Users/... paths only work in the simulator —
/// on a device use NSSearchPathForDirectoriesInDomains instead; confirm.
- (void)exportVideo {
    NSString *path = @"/Users/vs/Desktop/BeforParse.m4v";     // source path
    NSString *outputFilePath = @"/Users/vs/Desktop/hehe.mp4"; // output path
    [self parseVideoWithInputUrl:[NSURL fileURLWithPath:path]
                       outputUrl:[NSURL fileURLWithPath:outputFilePath]
                     blockHandle:^(AVAssetExportSession *avAssetExportSession) {
        switch (avAssetExportSession.status) {
            case AVAssetExportSessionStatusFailed: // failed
                NSLog(@"exportSessionError: %@",avAssetExportSession.error.description);
                break;
            case AVAssetExportSessionStatusExporting:
                NSLog(@"AVAssetExportSessionExporting");
                break;
            case AVAssetExportSessionStatusCompleted: // succeeded
                NSLog(@"exportSessionCompleted");
                dispatch_async(dispatch_get_main_queue(), ^{
                    // Fixed: was `performSelector:@selector(doSomeThings)
                    // withObject:self` — passing self as an argument to a
                    // zero-argument selector. A direct message send is
                    // type-safe and ARC-clean.
                    [self doSomeThings];
                });
                break;
            default:
                // Fixed: Waiting/Cancelled/Unknown previously fell through
                // a non-exhaustive switch silently.
                break;
        }
    }];
}
/// Scales the video at `inputUrl` onto a 320x480 render and writes the result
/// to `outputUrl` as MPEG-4. Export runs asynchronously; `handle` (if non-nil)
/// is invoked once with the finished AVAssetExportSession so the caller can
/// inspect its status/error. Returns early (without calling `handle`) when the
/// asset has no video track or the track cannot be inserted.
- (void)parseVideoWithInputUrl:(NSURL *)inputUrl outputUrl:(NSURL *)outputUrl blockHandle:(void(^)(AVAssetExportSession *avAssetExportSession)) handle {
    AVAsset *avAsset = [AVAsset assetWithURL:inputUrl];
    CMTime assetTime = [avAsset duration];
    float duration = CMTimeGetSeconds(assetTime);
    NSLog(@"视频时长是:%f",duration);

    // Fixed: the original indexed [0] unconditionally, which crashes on an
    // audio-only or unreadable asset. Use firstObject and bail out instead.
    AVAssetTrack *avAssetTack = [avAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    if (!avAssetTack) {
        NSLog(@"error: no video track in %@", inputUrl);
        return;
    }

    AVMutableComposition *avMutableComposition = [AVMutableComposition composition];
    AVMutableCompositionTrack *avMutableCompTrack = [avMutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *error = nil;
    // Fixed: check the method's BOOL return, not the error pointer — Cocoa
    // only guarantees *error is meaningful on failure.
    if (![avMutableCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration) ofTrack:avAssetTack atTime:kCMTimeZero error:&error]) {
        NSLog(@"error: %@",error.description);
        return;
    }

    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:avMutableCompTrack];
    CMTime totalDuration = assetTime;

    // Scale factors mapping the source frame onto the 320x480 render target.
    CGFloat rateW = 320 / avAssetTack.naturalSize.width;
    CGFloat rateH = 480 / avAssetTack.naturalSize.height;
    // Fixed: the original built two transforms from preferredTransform and
    // then overwrote both with CGAffineTransformMakeScale, so the effective
    // transform was the plain scale — the dead code is removed here.
    // NOTE(review): a pure scale ignores preferredTransform, so sources
    // recorded rotated (e.g. portrait camera footage) may render sideways —
    // confirm with a rotated input before shipping.
    [layerInstruction setTransform:CGAffineTransformMakeScale(rateW, rateH) atTime:kCMTimeZero];
    [layerInstruction setOpacity:0.0 atTime:totalDuration];

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, totalDuration);
    instruction.layerInstructions = @[layerInstruction];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.instructions = @[instruction];
    videoComposition.renderSize = CGSizeMake(320.f, 480.f); // target resolution
    videoComposition.frameDuration = CMTimeMake(1, 10);     // 10 fps output

    // Fixed: AVAssetExportSession fails if the output file already exists,
    // so remove any stale file first (removal error deliberately ignored —
    // a genuinely missing file is the normal case).
    [[NSFileManager defaultManager] removeItemAtURL:outputUrl error:NULL];

    AVAssetExportSession *avAssetExportSession = [[AVAssetExportSession alloc] initWithAsset:avMutableComposition presetName:AVAssetExportPreset640x480];
    avAssetExportSession.videoComposition = videoComposition;
    avAssetExportSession.outputFileType = AVFileTypeMPEG4;
    avAssetExportSession.outputURL = outputUrl;
    avAssetExportSession.shouldOptimizeForNetworkUse = YES;
    [avAssetExportSession exportAsynchronouslyWithCompletionHandler:^{
        // Fixed: nil-check the optional block before invoking it.
        if (handle) {
            handle(avAssetExportSession);
        }
    }];
}
// Hook invoked on the main queue after a successful export.
- (void)doSomeThings {
    // TODO: react to the finished compression (update UI, play the result, …).
}
@end
这样对比一下压缩处理过后的视频,尺寸、大小都变了。这是我暂时研究出的,如有大家有好的demo,可以向我提出,因为视频这块比较陌生,大家一起学习!
可以参考一下这个博客:blog.csdn.net/lookyou111/article/details/25625609