Merge two clips into a single video, played back to back: demoUrl is the first (front) clip and myUrl is the second (back) clip.
- (void)syntheticTheVideoWithDemoUrl:(NSURL *)demoUrl myUrl:(NSURL *)myUrl
{
    [self loadFrontAssetWithDemoUrl:demoUrl];
    [self loadBackAssetWithMyUrl:myUrl];
}
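The code assumes two ivars, frontAsset and backAsset, on the class that owns these methods. A minimal sketch of that owning class and a call site might look like the following; the HHVideoMerger class name, the merger property, and the file names are assumptions for illustration, not from the original.
// Hypothetical owner of the merge logic; frontAsset/backAsset are the ivars the methods below rely on.
@interface HHVideoMerger : NSObject {
    AVAsset *frontAsset;   // first (front) clip, loaded from demoUrl
    AVAsset *backAsset;    // second (back) clip, loaded from myUrl
}
- (void)syntheticTheVideoWithDemoUrl:(NSURL *)demoUrl myUrl:(NSURL *)myUrl;
@end

// Example call site; hold the merger in a strong property so it outlives the async loads and the export.
self.merger = [[HHVideoMerger alloc] init];
NSURL *demoUrl = [[NSBundle mainBundle] URLForResource:@"demo" withExtension:@"mp4"];
NSURL *myUrl = [[NSBundle mainBundle] URLForResource:@"mine" withExtension:@"mp4"];
[self.merger syntheticTheVideoWithDemoUrl:demoUrl myUrl:myUrl];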
// Asynchronously load the "tracks" property of the front clip, then try to merge.
- (void)loadFrontAssetWithDemoUrl:(NSURL *)url {
    AVAsset *asset = [AVAsset assetWithURL:url];
    [asset whenProperties:@[ @"tracks" ] areReadyDo:^{
        frontAsset = asset;
        [self mergeFrontBack];
    }];
}
// Asynchronously load the "tracks" property of the back clip, then try to merge.
- (void)loadBackAssetWithMyUrl:(NSURL *)url {
    AVAsset *asset = [AVAsset assetWithURL:url];
    [asset whenProperties:@[ @"tracks" ] areReadyDo:^{
        backAsset = asset;
        [self mergeFrontBack];
    }];
}
- (void)mergeFrontBack {
    // Both assets must have finished loading before we can build the composition.
    if (!backAsset || !frontAsset) {
        HHLog(@"waiting for both assets to finish loading");
        return;
    }
    AVMutableComposition *composition = [AVMutableComposition composition];
    // Insert the two clips back to back: the back clip starts where the front clip ends.
    [self addAsset:frontAsset toComposition:composition withTrackID:1 atTime:kCMTimeZero];
    [self addAsset:backAsset toComposition:composition withTrackID:2 atTime:frontAsset.duration];
    [self addAudioAsset:frontAsset toComposition:composition withTrackID:1 atTime:kCMTimeZero];
    [self addAudioAsset:backAsset toComposition:composition withTrackID:2 atTime:frontAsset.duration];

    // Render at the front clip's size and frame rate.
    AVAssetTrack *frontVideoTrack = frontAsset.firstVideoTrack;
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.renderSize = CGSizeMake(frontVideoTrack.naturalSize.width, frontVideoTrack.naturalSize.height);
    videoComposition.frameDuration = CMTimeMakeWithSeconds(1.0 / frontVideoTrack.nominalFrameRate, frontVideoTrack.naturalTimeScale);

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(frontAsset.duration, backAsset.duration));
    // Layer instructions are matched to tracks by track ID, so build them from the
    // composition's own tracks rather than the source asset tracks (whose IDs may not match).
    AVMutableCompositionTrack *frontCompositionTrack = (AVMutableCompositionTrack *)[composition trackWithTrackID:1];
    AVMutableCompositionTrack *backCompositionTrack = (AVMutableCompositionTrack *)[composition trackWithTrackID:2];
    AVMutableVideoCompositionLayerInstruction *frontLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:frontCompositionTrack];
    AVMutableVideoCompositionLayerInstruction *backLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:backCompositionTrack];
    instruction.layerInstructions = @[frontLayerInstruction, backLayerInstruction];
    videoComposition.instructions = @[instruction];

    // Export the composition to Documents/export.mp4, replacing any previous file.
    AVAssetExportSession *exporter = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
    NSString *documentPath = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject;
    NSString *urlPath = [documentPath stringByAppendingPathComponent:@"export.mp4"];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:urlPath]) {
        [fileManager removeItemAtPath:urlPath error:nil];
    }
    exporter.outputURL = [NSURL fileURLWithPath:urlPath];
    exporter.outputFileType = AVFileTypeMPEG4;
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.videoComposition = videoComposition;
    exporter.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(frontAsset.duration, backAsset.duration));
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        AVAssetExportSessionStatus status = exporter.status;
        dispatch_async(dispatch_get_main_queue(), ^{
            switch (status) {
                case AVAssetExportSessionStatusCompleted: {
                    HHLog(@"export finished: %@", exporter.outputURL);
                    break;
                }
                case AVAssetExportSessionStatusFailed: {
                    HHLog(@"export failed: %@", exporter.error);
                    break;
                }
                default:
                    break;
            }
        });
    }];
}
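If you want progress feedback while the export runs, AVAssetExportSession exposes a read-only progress property (0.0 to 1.0) that can be polled. A minimal sketch, placed right after the exportAsynchronouslyWithCompletionHandler: call; the block-based NSTimer (iOS 10+) and the 0.1 s interval are choices of this sketch, not from the original post.
// Poll export progress; schedule the timer on the main run loop, since the completion
// handlers above may run on a background queue that has no run loop.
__weak AVAssetExportSession *weakExporter = exporter;
dispatch_async(dispatch_get_main_queue(), ^{
    [NSTimer scheduledTimerWithTimeInterval:0.1 repeats:YES block:^(NSTimer *timer) {
        AVAssetExportSession *strongExporter = weakExporter;
        if (!strongExporter ||
            strongExporter.status == AVAssetExportSessionStatusCompleted ||
            strongExporter.status == AVAssetExportSessionStatusFailed ||
            strongExporter.status == AVAssetExportSessionStatusCancelled) {
            [timer invalidate];
            return;
        }
        HHLog(@"export progress: %.0f%%", strongExporter.progress * 100);
    }];
});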
// Add one asset's video track to the composition on its own track, starting at `time`.
- (void)addAsset:(AVAsset *)asset toComposition:(AVMutableComposition *)composition withTrackID:(CMPersistentTrackID)trackID atTime:(CMTime)time {
    AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:trackID];
    // Both clips are trimmed to the shorter of the two durations.
    CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMinimum(frontAsset.duration, backAsset.duration));
    AVAssetTrack *assetVideoTrack = asset.firstVideoTrack;
    [videoTrack insertTimeRange:timeRange ofTrack:assetVideoTrack atTime:time error:nil];
}
// Add one asset's audio track to the composition on its own track, starting at `time`.
- (void)addAudioAsset:(AVAsset *)asset toComposition:(AVMutableComposition *)composition withTrackID:(CMPersistentTrackID)trackID atTime:(CMTime)time {
    AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:trackID];
    // Both clips are trimmed to the shorter of the two durations.
    CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMinimum(frontAsset.duration, backAsset.duration));
    AVAssetTrack *assetAudioTrack = asset.firstAudioTrack;
    [audioTrack insertTimeRange:timeRange ofTrack:assetAudioTrack atTime:time error:nil];
}
Add a category on AVAsset with a few convenience helpers:
- (AVAssetTrack *)firstVideoTrack {
    NSArray *tracks = [self tracksWithMediaType:AVMediaTypeVideo];
    return [tracks firstObject];
}
- (AVAssetTrack *)firstAudioTrack {
    NSArray *tracks = [self tracksWithMediaType:AVMediaTypeAudio];
    return [tracks firstObject];
}
// Asynchronously load the given property keys and call `block` once every key is
// either loaded or failed; keys still pending are retried with another load pass.
- (void)whenProperties:(NSArray *)names areReadyDo:(void (^)(void))block {
    [self loadValuesAsynchronouslyForKeys:names completionHandler:^{
        NSMutableArray *pendingNames;
        for (NSString *name in names) {
            switch ([self statusOfValueForKey:name error:nil]) {
                case AVKeyValueStatusLoaded:
                case AVKeyValueStatusFailed:
                    break;
                default:
                    if (pendingNames == nil) {
                        pendingNames = [NSMutableArray array];
                    }
                    [pendingNames addObject:name];
                    break;
            }
        }
        if (pendingNames == nil) {
            block();
        } else {
            [self whenProperties:pendingNames areReadyDo:block];
        }
    }];
}
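For completeness, the matching category header might look like this; the AVAsset+HHHelpers file and category name are assumptions for illustration.
// AVAsset+HHHelpers.h (hypothetical file/category name)
#import <AVFoundation/AVFoundation.h>

@interface AVAsset (HHHelpers)
- (AVAssetTrack *)firstVideoTrack;
- (AVAssetTrack *)firstAudioTrack;
- (void)whenProperties:(NSArray *)names areReadyDo:(void (^)(void))block;
@end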