// Composites two video clips into a single side-by-side frame: demoUrl is the left pane, myUrl the right.
// Kicks off asynchronous loading of both source assets; the actual merge
// (-mergetLeftRight) runs from each load's completion and proceeds only
// once BOTH assets are ready.
// @param demoUrl URL of the video shown in the left pane.
// @param myUrl   URL of the video shown in the right pane.
- (void)syntheticTheVideoWithDemoUrl:(NSURL *)demoUrl myUrl:(NSURL *)myUrl
{
    [self loadFrontAssetWithDemoUrl:demoUrl];
    [self loadBackAssetWithMyUrl:myUrl];
}
// Asynchronously loads the left-pane ("front") asset's tracks, stores it,
// and attempts the merge (which no-ops until the back asset is also ready).
// NOTE(review): the former `withType:(TaskType)type` parameter was unused
// and did not match the one-argument call site in
// -syntheticTheVideoWithDemoUrl:myUrl:, so it has been removed.
- (void)loadFrontAssetWithDemoUrl:(NSURL *)url {
    AVAsset *asset = [AVAsset assetWithURL:url];
    [asset whenProperties:@[ @"tracks" ] areReadyDo:^{
        frontAsset = asset;
        [self mergetLeftRight];
    }];
}
// Asynchronously loads the right-pane ("back") asset's tracks, stores it,
// and attempts the merge (which no-ops until the front asset is also ready).
// NOTE(review): the former `withType:(TaskType)type` parameter was unused
// and did not match the one-argument call site in
// -syntheticTheVideoWithDemoUrl:myUrl:, so it has been removed.
- (void)loadBackAssetWithMyUrl:(NSURL *)url {
    AVAsset *asset = [AVAsset assetWithURL:url];
    [asset whenProperties:@[ @"tracks" ] areReadyDo:^{
        backAsset = asset;
        [self mergetLeftRight];
    }];
}
// Builds a side-by-side composition — frontAsset in the left half, backAsset
// in the right half of a 4:3 canvas — and exports it to Documents/export.mp4.
// Called from both asset-load completions; returns early until both assets
// are set.
// NOTE(review): the method-name typo ("merget") is kept intentionally so the
// call sites in the load completions remain valid.
- (void)mergetLeftRight {
    if (!backAsset || !frontAsset) {
        // Only one of the two asynchronous loads has finished so far.
        HHLog(@"something happened");
        return;
    }

    AVMutableComposition *composition = [AVMutableComposition composition];
    // Track 1 = left/front, track 2 = right/back; matching audio tracks.
    [self addAsset:frontAsset toComposition:composition withTrackID:1];
    [self addAsset:backAsset toComposition:composition withTrackID:2];
    [self addAudioAsset:frontAsset toComposition:composition withTrackID:1];
    [self addAudioAsset:backAsset toComposition:composition withTrackID:2];

    AVAssetTrack *backVideoTrack = backAsset.firstVideoTrack;
    AVAssetTrack *frontVideoTrack = frontAsset.firstVideoTrack;

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    // Render canvas uses a 4:3 aspect ratio.
    videoComposition.renderSize = CGSizeMake(width, width * 3 / 4);
    // One frame per 1/fps seconds, expressed in the front track's timescale.
    videoComposition.frameDuration = CMTimeMakeWithSeconds(1.0 / backVideoTrack.nominalFrameRate,
                                                           frontVideoTrack.naturalTimeScale);

    AVMutableVideoCompositionInstruction *instruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = [composition.tracks.firstObject timeRange];

    // Left pane: scale the front video to fill the left half of the canvas.
    AVMutableVideoCompositionLayerInstruction *frontLayerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstruction];
    [frontLayerInstruction setTransform:CGAffineTransformMakeScale(width / 2 / frontVideoTrack.naturalSize.width,
                                                                   width * 3 / 4 / frontVideoTrack.naturalSize.height)
                                 atTime:kCMTimeZero];
    frontLayerInstruction.trackID = 1;

    // Right pane: translate the back video into the right half. NOTE(review):
    // unlike the left pane it is not scaled — confirm that is intended.
    AVMutableVideoCompositionLayerInstruction *backLayerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstruction];
    backLayerInstruction.trackID = 2;
    [backLayerInstruction setTransform:CGAffineTransformMakeTranslation(width / 2, 0)
                                atTime:kCMTimeZero];

    instruction.layerInstructions = @[ frontLayerInstruction, backLayerInstruction ];
    videoComposition.instructions = @[ instruction ];

    AVAssetExportSession *exporter =
        [AVAssetExportSession exportSessionWithAsset:composition
                                          presetName:AVAssetExportPresetMediumQuality];
    NSString *urlPath = [documentPath stringByAppendingPathComponent:@"export.mp4"];
    // AVAssetExportSession will not overwrite an existing file — remove any
    // stale output first.
    if ([fileManager fileExistsAtPath:urlPath]) {
        [fileManager removeItemAtPath:urlPath error:nil];
    }
    exporter.outputURL = [NSURL fileURLWithPath:urlPath];
    exporter.outputFileType = AVFileTypeMPEG4;
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.videoComposition = videoComposition;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        AVAssetExportSessionStatus status = exporter.status;
        // Hop to the main queue before touching any UI-facing state.
        dispatch_async(dispatch_get_main_queue(), ^{
            switch (status) {
                case AVAssetExportSessionStatusCompleted: {
                    break;
                }
                case AVAssetExportSessionStatusFailed: {
                    // Surface the failure instead of swallowing it silently.
                    HHLog(@"export failed: %@", exporter.error);
                    break;
                }
                default:
                    break;
            }
        });
    }];
}
// Inserts `asset`'s first video track into `composition` under `trackID`,
// trimmed to the duration of the shorter of the two source assets so the
// left and right panes end at the same time.
- (void)addAsset:(AVAsset *)asset toComposition:(AVMutableComposition *)composition withTrackID:(CMPersistentTrackID)trackID {
    AVMutableCompositionTrack *videoTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:trackID];
    // Clamp to the shorter asset so neither pane freezes on its final frame.
    CMTimeRange timeRange;
    if (CMTimeGetSeconds(frontAsset.duration) > CMTimeGetSeconds(backAsset.duration)) {
        timeRange = CMTimeRangeFromTimeToTime(kCMTimeZero, backAsset.duration);
    } else {
        timeRange = CMTimeRangeFromTimeToTime(kCMTimeZero, frontAsset.duration);
    }
    AVAssetTrack *assetVideoTrack = asset.firstVideoTrack;
    NSError *error = nil;
    // Check the return value (not just the error) per the Cocoa convention.
    if (![videoTrack insertTimeRange:timeRange ofTrack:assetVideoTrack atTime:kCMTimeZero error:&error]) {
        HHLog(@"failed to insert video track %d: %@", (int)trackID, error);
    }
}
// Inserts `asset`'s first audio track into `composition` under `trackID`,
// trimmed to the duration of the shorter of the two source assets so audio
// stays aligned with the video panes.
- (void)addAudioAsset:(AVAsset *)asset toComposition:(AVMutableComposition *)composition withTrackID:(CMPersistentTrackID)trackID {
    AVMutableCompositionTrack *audioTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:trackID];
    // Clamp to the shorter asset, mirroring the video-track logic.
    CMTimeRange timeRange;
    if (CMTimeGetSeconds(frontAsset.duration) > CMTimeGetSeconds(backAsset.duration)) {
        timeRange = CMTimeRangeFromTimeToTime(kCMTimeZero, backAsset.duration);
    } else {
        timeRange = CMTimeRangeFromTimeToTime(kCMTimeZero, frontAsset.duration);
    }
    // Renamed from `assetVideoTrack`: this is the AUDIO track.
    AVAssetTrack *assetAudioTrack = asset.firstAudioTrack;
    NSError *error = nil;
    // Check the return value (not just the error) per the Cocoa convention.
    if (![audioTrack insertTimeRange:timeRange ofTrack:assetAudioTrack atTime:kCMTimeZero error:&error]) {
        HHLog(@"failed to insert audio track %d: %@", (int)trackID, error);
    }
}
// Category on AVAsset: convenience accessors for the first video/audio track and asynchronous property loading.
// The receiver's first video track, or nil when the asset has none
// (-firstObject is nil-safe on an empty tracks array).
- (AVAssetTrack *)firstVideoTrack {
    return [[self tracksWithMediaType:AVMediaTypeVideo] firstObject];
}
// The receiver's first audio track, or nil when the asset has none
// (-firstObject is nil-safe on an empty tracks array).
- (AVAssetTrack *)firstAudioTrack {
    return [[self tracksWithMediaType:AVMediaTypeAudio] firstObject];
}
// Loads the given AVAsset property keys asynchronously and invokes `block`
// once every key has reached a terminal state. Keys still pending are
// re-queued by recursing until they settle.
// NOTE(review): a failed key still counts as "ready", so `block` may run
// even when loading failed — callers must re-check status if they care.
// @param names Property key paths to load (e.g. @"tracks", @"duration").
// @param block Invoked (on the loader's completion queue) when done.
- (void)whenProperties:(NSArray *)names areReadyDo:(void (^)(void))block {
    [self loadValuesAsynchronouslyForKeys:names completionHandler:^{
        NSMutableArray *pendingNames;
        for (NSString *name in names) {
            switch ([self statusOfValueForKey:name error:nil]) {
                case AVKeyValueStatusLoaded:
                case AVKeyValueStatusFailed:
                case AVKeyValueStatusCancelled:  // terminal too — previously
                                                 // fell into default and
                                                 // recursed forever
                    break;
                default:
                    // Still loading/unknown — collect for another pass.
                    if (pendingNames == nil) {
                        pendingNames = [NSMutableArray array];
                    }
                    [pendingNames addObject:name];
            }
        }
        if (pendingNames == nil) {
            block();
        } else {
            [self whenProperties:pendingNames areReadyDo:block];
        }
    }];
}