iOS 使用 AVAssetExportSession 将视频和音频合并成一个视频,并对原始视频进行裁剪和旋转。
//
//  ViewController.m
//  test_crop_rotation_video_01
//
//  Created by jeffasd on 2016/5/8.
//  Copyright © 2016年 jeffasd. All rights reserved.
//

#import "ViewController.h"

#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CMTime.h>

#import "AVPlayerDemoPlaybackView.h"

#import <AssetsLibrary/AssetsLibrary.h>

static void *AVPlayerDemoPlaybackViewControllerStatusObservationContext = &AVPlayerDemoPlaybackViewControllerStatusObservationContext;


// Demo controller: on load it merges/rotates a bundled movie, then plays the
// exported result in an AVPlayerDemoPlaybackView.
@interface ViewController ()

// Player for the exported movie; its "status" key is observed via KVO.
@property (nonatomic, strong) AVPlayer* mPlayer;

// Lazily created playback view (see the custom getter), added to self.view.
@property (nonatomic, strong) AVPlayerDemoPlaybackView  *mPlaybackView;

@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    
    // Locate the bundled sample movie. -pathForResource:ofType: returns nil when
    // the file is missing, and +fileURLWithPath: throws on a nil path, so guard first.
    NSString *resourcePath = [[NSBundle mainBundle] pathForResource:@"E64349C2-EEB5-4B29-BA7D-8247074E7B41.mp4" ofType:nil];
    if (resourcePath == nil) {
        NSLog(@"error is %@", @"source video file is missing from the bundle");
        return;
    }
    NSURL *url = [NSURL fileURLWithPath:resourcePath];
    
    NSString *documentsPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    
    // Randomized output name (video_jeffasd_0.mp4 … video_jeffasd_9.mp4).
    int index = arc4random_uniform(10);
    NSString *name = [NSString stringWithFormat:@"video_jeffasd_%d.mp4", index];
    NSString *exportPath = [documentsPath stringByAppendingPathComponent:name];
    NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
    // Remove any stale file at the destination; AVAssetExportSession fails if the
    // output URL already exists.
    [[NSFileManager defaultManager] removeItemAtURL:exportUrl error:nil];
    NSLog(@"exportUrl is %@", exportUrl);
    
    [self rotateVideoAssetWithFileURL:url dstFileURL:exportUrl];
}


// Merges the first video and audio tracks of the asset at fileURL into a new
// composition, applies the rotation/mirror transform through an
// AVMutableVideoComposition, and asynchronously exports MPEG-4 to dstFileURL.
// On completion (success or failure) -exportDidFinish: runs on the main queue.
- (void)rotateVideoAssetWithFileURL:(NSURL *)fileURL dstFileURL:(NSURL *)dstFileURL{
    // Precise duration/timing makes the timeRange math accurate (slower load).
    NSDictionary *options = @{AVURLAssetPreferPreciseDurationAndTimingKey : @YES};
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:fileURL options:options];
    
    // firstObject is nil-safe; objectAtIndex:0 would throw on an asset missing a
    // video or audio track, making the nil check below unreachable.
    AVAssetTrack *videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    AVAssetTrack *audioAssetTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (videoAssetTrack == nil || audioAssetTrack == nil) {
        NSLog(@"error is %@", @"video or audio assetTrack is nil");
        return;
    }
    
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = videoAssetTrack.minFrameDuration;
    // Swap width/height: the composition renders the track rotated into portrait.
    CGSize renderSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
    videoComposition.renderSize = renderSize;
    
    // One instruction spanning the whole asset, carrying a single layer
    // instruction that applies the transform at time zero.
    AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    
    AVMutableVideoCompositionLayerInstruction *videoCompositionLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoAssetTrack];
    
    // Affine transforms use iOS screen coordinates: +x is right, +y is down.
    CGAffineTransform transform = [self videoAssetTrackTransform:videoAssetTrack];
    [videoCompositionLayerInstruction setTransform:transform atTime:kCMTimeZero];
    
    videoCompositionInstruction.layerInstructions = @[videoCompositionLayerInstruction];
    videoComposition.instructions = @[videoCompositionInstruction];
    
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    // NOTE: with any preset other than AVAssetExportPresetPassthrough the video
    // track must be inserted before the audio track; inserting audio first makes
    // the export fail with error -11841.
    AVMutableCompositionTrack *videoCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *error = nil;
    // Check the BOOL return value, not the error pointer, per Cocoa convention.
    if (![videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero error:&error]) {
        NSLog(@"error is %@", error);
        return;
    }
    error = nil;
    AVMutableCompositionTrack *audioCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    if (![audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:audioAssetTrack atTime:kCMTimeZero error:&error]) {
        NSLog(@"error is %@", error);
        return;
    }
    // CMTimeGetSeconds avoids the raw value/timescale division, which would
    // divide by zero for an invalid duration.
    NSLog(@"the assetDuration is %lld", (long long)CMTimeGetSeconds(asset.duration));
    
    AVAssetExportSession *assetExportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    assetExportSession.shouldOptimizeForNetworkUse = YES;
    assetExportSession.videoComposition = videoComposition;
    assetExportSession.outputURL = dstFileURL;
    assetExportSession.outputFileType = AVFileTypeMPEG4;
    
    // Weak captures avoid a retain cycle: the session retains its completion
    // handler, and the handler must not retain the session (or self) back.
    __weak AVAssetExportSession *weakAssetExportSession = assetExportSession;
    __weak typeof(self) weakSelf = self;
    [assetExportSession exportAsynchronouslyWithCompletionHandler:^{
        if ([weakAssetExportSession status] != AVAssetExportSessionStatusCompleted) {
            NSLog(@"the error is %@", [weakAssetExportSession error]);
            NSLog(@"the status is %ld", (long)[weakAssetExportSession status]);
            NSLog(@"the outPutPath is %@", [weakAssetExportSession.outputURL absoluteString]);
            NSLog(@"the error is %@", [weakAssetExportSession error].userInfo);
        }
        dispatch_async(dispatch_get_main_queue(), ^{
            // Use the captured dstFileURL here: weakAssetExportSession.outputURL
            // can already read as nil at this point even though the file exists.
            [weakSelf exportDidFinish:dstFileURL];
        });
    }];
}

// Builds the transform a video composition should apply to the track so its
// frames render upright inside the swapped (portrait) render size.
// Affine transforms use iOS screen coordinates: +x is right, +y is down.
- (CGAffineTransform)videoAssetTrackTransform:(AVAssetTrack *)videoAssetTrack {
    int degrees = [self degressFromVideoFileWithVideoAssetTrack:videoAssetTrack];
    CGAffineTransform transform = CGAffineTransformIdentity;
    if (degrees != 0) {
        CGAffineTransform translateToCenter = CGAffineTransformIdentity;
        if (degrees == 90) {
            // rotate 90° clockwise (translate right by the post-rotation width first)
            translateToCenter = CGAffineTransformMakeTranslation(videoAssetTrack.naturalSize.height, 0.0);
            transform = CGAffineTransformRotate(translateToCenter, M_PI_2);
        } else if(degrees == 180){
            // rotate 180° clockwise
            translateToCenter = CGAffineTransformMakeTranslation(videoAssetTrack.naturalSize.width, videoAssetTrack.naturalSize.height);
            transform = CGAffineTransformRotate(translateToCenter, M_PI);
        } else if(degrees == 270){
            // rotate 270° clockwise
            translateToCenter = CGAffineTransformMakeTranslation(0.0, videoAssetTrack.naturalSize.width);
            transform = CGAffineTransformRotate(translateToCenter, M_PI_2 + M_PI);
        }else if(degrees == -180){
            // flip 180° about the x-axis (vertical mirror)
            // affine transforms use iOS screen coordinates: +x right, +y down
#if 1
            //transform = CGAffineTransformTranslate(transform, videoAssetTrack.naturalSize.width, videoAssetTrack.naturalSize.height);
            //transform = CGAffineTransformRotate(transform, 90/180.0f*M_PI); // rotate 90°
            //transform = CGAffineTransformScale(transform, 1.0, -1.0); // flip the video vertically
            //transform = CGAffineTransformScale(transform, -1.0, 1.0);  // flip the video horizontally
            //transform = CGAffineTransformScale(transform, 1.0, 1.0); // keep the original size
            
            // original video
            //         ___
            //        |   |
            //        |   |
            //     -------------------- +x
            //    |
            //    |
            //    |
            //    |
            //    |
            //    |
            //    |
            //    +y
            
            //transform = CGAffineTransformScale(transform, 1.0, -1.0); // flip the video vertically
            
            //     -------------------- +x
            //    |   |   |
            //    |   |___|
            //    |
            //    |
            //    |
            //    |
            //    |
            //    +y
            
            //transform = CGAffineTransformTranslate(transform, 0, -videoAssetTrack.naturalSize.height);// translate the video back to its original position
            
            //         ___
            //        |   |
            //        |   |
            //     -------------------- +x
            //    |
            //    |
            //    |
            //    |
            //    |
            //    |
            //    |
            //    +y
            
            transform = CGAffineTransformScale(transform, 1.0, -1.0); // flip the video vertically
            transform = CGAffineTransformTranslate(transform, 0, -videoAssetTrack.naturalSize.height); // then translate back into view
#else
            transform = videoAssetTrack.preferredTransform;
            transform = CGAffineTransformTranslate(transform, 0, -videoAssetTrack.naturalSize.height);
#endif
        }
    }
    
    // Disabled crop experiment: `cropVideo` is an undefined macro (0 inside #if),
    // so `0 - cropVideo` is always false and this branch never compiles in.
#if 0 - cropVideo
    //Here we shift the viewing square up to the TOP of the video so we only see the top
    CGAffineTransform t1 = CGAffineTransformMakeTranslation(videoAssetTrack.naturalSize.height, 0 );
    
    //Use this code if you want the viewing square to be in the middle of the video
    //CGAffineTransform t1 = CGAffineTransformMakeTranslation(videoAssetTrack.naturalSize.height, -(videoAssetTrack.naturalSize.width - videoAssetTrack.naturalSize.height) /2 );
    
    //Make sure the square is portrait
    transform = CGAffineTransformRotate(t1, M_PI_2);
#endif
    
    return transform;
}

// Derives a clockwise rotation in degrees from the track's preferredTransform
// by matching the four canonical orientation matrices, plus -180 as a sentinel
// for a vertical mirror (flip about the x-axis). Unrecognized transforms map
// to 0. (The duplicated LandscapeLeft branch in the original was unreachable
// and has been removed.)
// NOTE: "degress" is a long-standing typo kept for interface compatibility.
- (int)degressFromVideoFileWithVideoAssetTrack:(AVAssetTrack *)videoAssetTrack {
    int degress = 0;
    CGAffineTransform t = videoAssetTrack.preferredTransform;
    if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0){
        // Portrait
        degress = 90;
    } else if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0){
        // PortraitUpsideDown
        degress = 270;
    } else if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0){
        // LandscapeRight (identity)
        degress = 0;
    } else if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0){
        // LandscapeLeft
        degress = 180;
    } else if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == -1.0){
        // mirrored about the x-axis
        degress = -180;
    }
    
    return degress;
}

// Called on the main queue once the export finishes (successfully or not);
// builds a player for the exported file and observes its "status" via KVO so
// playback starts as soon as the item is ready.
- (void)exportDidFinish:(NSURL *)fileURL {
    NSLog(@"fileURL is %@", fileURL);
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:fileURL options:nil];
    AVPlayerItem *newPlayerItem = [AVPlayerItem playerItemWithAsset:asset];

    // Detach the observation from any previously created player before replacing
    // it; otherwise the old player is deallocated while still being observed.
    // (Messaging nil is a no-op, but the explicit check documents the intent.)
    if (self.mPlayer) {
        [self.mPlayer removeObserver:self forKeyPath:@"status" context:AVPlayerDemoPlaybackViewControllerStatusObservationContext];
    }
    self.mPlayer = [AVPlayer playerWithPlayerItem:newPlayerItem];
    
    // NOTE(review): no matching removeObserver in dealloc is visible here —
    // confirm the controller outlives the player or add removal there.
    [self.mPlayer addObserver:self forKeyPath:@"status" options:0 context:AVPlayerDemoPlaybackViewControllerStatusObservationContext];
}

// KVO callback for the player's "status". Only handles our own registration;
// anything else is forwarded to super, as the KVO contract requires.
- (void)observeValueForKeyPath:(NSString*) path ofObject:(id)object change:(NSDictionary*)change context:(void*)context {
    if (context != AVPlayerDemoPlaybackViewControllerStatusObservationContext) {
        [super observeValueForKeyPath:path ofObject:object change:change context:context];
        return;
    }
    if (self.mPlayer.status == AVPlayerStatusReadyToPlay) {
        // Attach the player to the (lazily created) playback view and start.
        [self.mPlaybackView setPlayer:self.mPlayer];
        
        [self.mPlayer play];
    }
}

// Lazy getter: creates the playback view on first access, configures the audio
// session, and adds the view to the controller's view hierarchy.
- (AVPlayerDemoPlaybackView *)mPlaybackView{
    if (_mPlaybackView) {
        return _mPlaybackView;
    }
    
    // Route audio through the playback category so sound plays even when the
    // ringer switch is set to silent, then activate the session.
    AVAudioSession *session = [AVAudioSession sharedInstance];
    [session setCategory:AVAudioSessionCategoryPlayback error:nil];
    [session setActive:YES error:nil];
    
    _mPlaybackView = [[AVPlayerDemoPlaybackView alloc] initWithFrame:self.view.frame];
    [self.view addSubview:_mPlaybackView];
    return _mPlaybackView;
}


// Standard UIViewController memory-warning hook; nothing cached to release here.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}


@end
/*
 
Copyright (C) 2010-2011 Apple Inc. All Rights Reserved.

*/


#import <UIKit/UIKit.h>

@class AVPlayer;

// UIView subclass whose backing layer is an AVPlayerLayer (see +layerClass in
// the .m); the player and fill-mode accessors forward to that layer.
@interface AVPlayerDemoPlaybackView : UIView

// NOTE(review): declared `retain`, but both accessors are overridden in the .m
// to store on the backing AVPlayerLayer, so no ivar is synthesized for this.
@property (nonatomic, retain) AVPlayer* player;

- (void)setPlayer:(AVPlayer*)player;
// fillMode is an AVLayerVideoGravity string (resize-aspect is the default).
- (void)setVideoFillMode:(NSString *)fillMode;

@end
/*

Copyright (C) 2010-2011 Apple Inc. All Rights Reserved.

*/
/* ---------------------------------------------------------
 **  To play the visual component of an asset, you need a view 
 **  containing an AVPlayerLayer layer to which the output of an 
 **  AVPlayer object can be directed. You can create a simple 
 **  subclass of UIView to accommodate this. Use the view’s Core 
 **  Animation layer (see the 'layer' property) for rendering.  
 **  This class, AVPlayerDemoPlaybackView, is a subclass of UIView  
 **  that is used for this purpose.
 ** ------------------------------------------------------- */

#import "AVPlayerDemoPlaybackView.h"
#import <AVFoundation/AVFoundation.h>



@implementation AVPlayerDemoPlaybackView

// Back this view with an AVPlayerLayer so AVPlayer output renders directly
// into the view's own Core Animation layer.
+ (Class)layerClass
{
	return [AVPlayerLayer class];
}

// The player currently attached to the backing AVPlayerLayer, if any.
- (AVPlayer*)player
{
	AVPlayerLayer *playerLayer = (AVPlayerLayer *)self.layer;
	return playerLayer.player;
}

// Attaches the given player to the backing AVPlayerLayer for display.
- (void)setPlayer:(AVPlayer*)player
{
	AVPlayerLayer *playerLayer = (AVPlayerLayer *)self.layer;
	playerLayer.player = player;
}

/* Specifies how the video is displayed within a player layer's bounds.
   (AVLayerVideoGravityResizeAspect is default) */
- (void)setVideoFillMode:(NSString *)fillMode
{
	((AVPlayerLayer *)self.layer).videoGravity = fillMode;
}

@end




(完)