最近在研究剪辑视频,目前知道了怎么剪辑视频片段和添加背景音乐,不多说了,代码写的很清楚,如下:
1 添加背景音乐调用
[PSJVideoEditor addBackgroundMiusicWithVideoUrlStr:_videoUrl audioUrl:_audioUrl start:3 end:8 isOrignalSound:isOrignalSound completion:^(NSString *outPath, BOOL isSuccess) {
if (isSuccess) {
_videoUrl = [NSURL URLWithString:outPath];
}
}];
2 剪辑视频片段调用
[PSJVideoEditor cropWithVideoUrlStr:_videoUrl audioUrl:_audioUrl start:3 end:8 isOrignalSound:isOrignalSound completion:^(NSString *outPath, BOOL isSuccess) {
if (isSuccess) {
_videoUrl = [NSURL URLWithString:outPath];
}
}];
3 附上源码
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
@interface PSJVideoEditor : NSObject

/*!
 @method
 @brief Mixes a background-music track into a video.
 @discussion The [startTime, endTime] segment of the audio file is inserted
 into the video's timeline at offset startTime, and the merged movie is
 exported asynchronously. NOTE(review): the selector keeps the historical
 "Miusic" spelling so existing callers do not break.
 @param videoUrl source video URL
 @param audioUrl background-audio URL
 @param startTime time, in seconds, at which the audio insertion starts
 @param endTime time, in seconds, at which the audio insertion ends
 @param isOrignal whether the video's original sound is kept
 @param completionHandle invoked with the output file path and a success flag
 */
+ (void)addBackgroundMiusicWithVideoUrlStr:(NSURL *)videoUrl
                                  audioUrl:(NSURL *)audioUrl
                                     start:(CGFloat)startTime
                                       end:(CGFloat)endTime   // was `startTime` again — duplicate parameter name does not compile
                            isOrignalSound:(BOOL)isOrignal
                                completion:(void (^)(NSString *outPath, BOOL isSuccess))completionHandle;

/*!
 @method
 @brief Trims a video to a time segment.
 @discussion Exports the [startTime, endTime] segment of the source video
 asynchronously, without re-encoding.
 @param videoUrl source video URL
 @param audioUrl audio URL (not used by the trim itself; kept for symmetry with the mix API)
 @param startTime trim start, in seconds
 @param endTime trim end, in seconds
 @param isOrignal whether the original sound is kept
 @param completionHandle invoked with the output file path and a success flag
 */
+ (void)cropWithVideoUrlStr:(NSURL *)videoUrl
                   audioUrl:(NSURL *)audioUrl
                      start:(CGFloat)startTime
                        end:(CGFloat)endTime
             isOrignalSound:(BOOL)isOrignal
                 completion:(void (^)(NSString *outPath, BOOL isSuccess))completionHandle;
@end
#import "PSJVideoEditor.h"
#import <AVFoundation/AVFoundation.h>
#define MediaFileName @"MixVideo.MOV"
@implementation PSJVideoEditor

#pragma mark - Public

+ (void)addBackgroundMiusicWithVideoUrlStr:(NSURL *)videoUrl
                                  audioUrl:(NSURL *)audioUrl
                                     start:(CGFloat)startTime
                                       end:(CGFloat)endTime
                            isOrignalSound:(BOOL)isOrignal
                                completion:(void (^)(NSString *outPath, BOOL isSuccess))completionHandle
{
    NSString *outputFilePath = [PSJVideoEditor fileSavePath];

    // Mutable container that the video, original-sound and music tracks are merged into.
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // Copy the full source video track into the composition.
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
    AVAssetTrack *sourceVideoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    if (sourceVideoTrack == nil) {
        // No video track: fail early (the original crashed on objectAtIndex:0 here).
        if (completionHandle) completionHandle(outputFilePath, NO);
        return;
    }
    CMTimeRange fullVideoRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *compositionVideoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVideoTrack insertTimeRange:fullVideoRange
                                   ofTrack:sourceVideoTrack
                                    atTime:kCMTimeZero
                                     error:nil];

    // [startTime, endTime] expressed in the video's timescale.
    CMTime insertAt = CMTimeMakeWithSeconds(startTime, videoAsset.duration.timescale);
    CMTime insertDuration = CMTimeMakeWithSeconds(endTime - startTime, videoAsset.duration.timescale);
    CMTimeRange musicRange = CMTimeRangeMake(insertAt, insertDuration);

    if (isOrignal) {
        // Keep the video's own sound. Skip silently when the video has no audio
        // track — the original passed nil to insertTimeRange:ofTrack:, which is
        // not a valid argument for that API.
        AVAssetTrack *originalSoundTrack = [videoAsset tracksWithMediaType:AVMediaTypeAudio].firstObject;
        if (originalSoundTrack != nil) {
            AVMutableCompositionTrack *compositionVoiceTrack =
                [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                            preferredTrackID:kCMPersistentTrackID_Invalid];
            [compositionVoiceTrack insertTimeRange:fullVideoRange
                                           ofTrack:originalSoundTrack
                                            atTime:kCMTimeZero
                                             error:nil];
        }
    }

    // Background music: take [startTime, endTime] of the audio file and place it
    // at the same offset on the video timeline (e.g. seconds 3–8 start at 3s).
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioUrl options:nil];
    AVAssetTrack *musicTrack = [audioAsset tracksWithMediaType:AVMediaTypeAudio].firstObject;
    if (musicTrack != nil) {
        AVMutableCompositionTrack *compositionMusicTrack =
            [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                        preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionMusicTrack insertTimeRange:musicRange
                                       ofTrack:musicTrack
                                        atTime:insertAt
                                         error:nil];
    }

    [PSJVideoEditor p_exportAsset:mixComposition
                       presetName:AVAssetExportPresetMediumQuality
                        timeRange:kCMTimeRangeInvalid
                       outputPath:outputFilePath
                       completion:completionHandle];
}

+ (void)cropWithVideoUrlStr:(NSURL *)videoUrl
                   audioUrl:(NSURL *)audioUrl
                      start:(CGFloat)startTime
                        end:(CGFloat)endTime
             isOrignalSound:(BOOL)isOrignal
                 completion:(void (^)(NSString *outPath, BOOL isSuccess))completionHandle
{
    // NOTE(review): audioUrl / isOrignal are kept for signature compatibility but
    // never affected the exported file — the original built an AVMutableComposition
    // from them and then exported the raw videoAsset anyway. That dead code is
    // removed here; the observable output is unchanged.
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
    NSString *outputFilePath = [PSJVideoEditor fileSavePath];

    // Trim to [startTime, endTime] of the source; Passthrough avoids re-encoding.
    CMTime start = CMTimeMakeWithSeconds(startTime, videoAsset.duration.timescale);
    CMTime duration = CMTimeMakeWithSeconds(endTime - startTime, videoAsset.duration.timescale);
    CMTimeRange cropRange = CMTimeRangeMake(start, duration);

    [PSJVideoEditor p_exportAsset:videoAsset
                       presetName:AVAssetExportPresetPassthrough
                        timeRange:cropRange
                       outputPath:outputFilePath
                       completion:completionHandle];
}

#pragma mark - Private

/// Shared export path for both public methods (the original duplicated this logic).
/// Asynchronously exports `asset` to `outputPath` as a QuickTime movie; pass
/// kCMTimeRangeInvalid as `timeRange` to export the whole asset. The completion
/// block, if any, is invoked on the main queue so callers can update UI directly.
+ (void)p_exportAsset:(AVAsset *)asset
           presetName:(NSString *)presetName
            timeRange:(CMTimeRange)timeRange
           outputPath:(NSString *)outputPath
           completion:(void (^)(NSString *outPath, BOOL isSuccess))completionHandle
{
    void (^finish)(BOOL) = ^(BOOL success) {
        if (completionHandle) {
            dispatch_async(dispatch_get_main_queue(), ^{
                completionHandle(outputPath, success);
            });
        }
    };

    // Verify the preset that is actually used. The original compared against
    // AVAssetExportPresetMediumQuality while exporting with Passthrough, and
    // never called the completion handler when the check failed. Passthrough is
    // excluded from the check because exportPresetsCompatibleWithAsset: never
    // includes it (per Apple's documentation).
    if (![presetName isEqualToString:AVAssetExportPresetPassthrough] &&
        ![[AVAssetExportSession exportPresetsCompatibleWithAsset:asset] containsObject:presetName]) {
        finish(NO);
        return;
    }

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset
                                                                           presetName:presetName];
    exportSession.outputURL = [NSURL fileURLWithPath:outputPath];
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    exportSession.shouldOptimizeForNetworkUse = YES;
    if (CMTIMERANGE_IS_VALID(timeRange)) {
        exportSession.timeRange = timeRange;
    }
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (exportSession.status == AVAssetExportSessionStatusFailed) {
            NSLog(@"合成失败:%@", exportSession.error);
        }
        finish(exportSession.status == AVAssetExportSessionStatusCompleted);
    }];
}

#pragma mark - Helpers

/// Duration of the media at `mediaUrlStr`, in seconds.
/// NOTE(review): the URL is built with +URLWithString:; for a local path the
/// caller probably needs a file URL — confirm against call sites.
+ (CGFloat)getMediaDurationWithMediaUrl:(NSString *)mediaUrlStr {
    NSURL *mediaUrl = [NSURL URLWithString:mediaUrlStr];
    AVURLAsset *mediaAsset = [[AVURLAsset alloc] initWithURL:mediaUrl options:nil];
    // CMTimeGetSeconds avoids the original's integer division
    // (value / timescale are both integer types), which truncated
    // fractional seconds.
    return (CGFloat)CMTimeGetSeconds(mediaAsset.duration);
}

/// Path of the fixed intermediate media file inside tmp/.
+ (NSString *)getMediaFilePath {
    return [NSTemporaryDirectory() stringByAppendingPathComponent:MediaFileName];
}

/// Returns a timestamp-unique output path under Documents/, removing any stale
/// file at that path first. The export container is AVFileTypeQuickTimeMovie,
/// so the extension is .mov — the original's .mp4 mismatched the container and
/// makes AVAssetExportSession reject the output URL.
+ (NSString *)fileSavePath
{
    NSInteger nowInter = (NSInteger)[[NSDate date] timeIntervalSince1970];
    NSString *fileName = [NSString stringWithFormat:@"output%ld.mov", (long)nowInter];
    NSString *documentsDirectory =
        NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject;
    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:fileName];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputFilePath]) {
        [fileManager removeItemAtPath:outputFilePath error:nil];
    }
    return outputFilePath;
}
@end