//
// ViewController.m
// test_crop_rotation_video_01
//
// Created by jeffasd on 2016/5/8.
// Copyright © 2016 jeffasd. All rights reserved.
//
#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CMTime.h>
#import "AVPlayerDemoPlaybackView.h"
#import <AssetsLibrary/AssetsLibrary.h>
static void *AVPlayerDemoPlaybackViewControllerStatusObservationContext = &AVPlayerDemoPlaybackViewControllerStatusObservationContext;
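// Unique KVO context pointer used to distinguish this controller's status observation from any observations registered by superclasses.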
@interface ViewController ()
@property (nonatomic, strong) AVPlayer* mPlayer;
@property (nonatomic, strong) AVPlayerDemoPlaybackView *mPlaybackView;
@end
@implementation ViewController
- (void)viewDidLoad {
[super viewDidLoad];
NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"E64349C2-EEB5-4B29-BA7D-8247074E7B41.mp4" ofType:nil]];
NSString * documentsPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
int index = arc4random_uniform(10);
NSString *name = [NSString stringWithFormat:@"video_jeffasd_%d.mp4", index];
NSString *exportPath = [documentsPath stringByAppendingPathComponent:name];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
[[NSFileManager defaultManager] removeItemAtURL:exportUrl error:nil];
NSLog(@"exportUrl is %@", exportUrl);
[self rotateVideoAssetWithFileURL:url dstFileURL:exportUrl];
}
- (void)rotateVideoAssetWithFileURL:(NSURL *)fileURL dstFileURL:(NSURL *)dstFileURL{
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:YES], AVURLAssetPreferPreciseDurationAndTimingKey, nil];
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:fileURL options:options];
AVAssetTrack *videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
AVAssetTrack *audioAssetTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
if (videoAssetTrack == nil || audioAssetTrack == nil) {
NSLog(@"error: video or audio asset track is nil");
return;
}
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = videoAssetTrack.minFrameDuration;
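// Note (added): the render size swaps the track's width and height on the assumption that the source was recorded in portrait and will be rotated 90°/270°; a track that needs no rotation should keep its natural size unswapped.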
CGSize renderSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
videoComposition.renderSize = renderSize;
// Create a video composition instruction.
AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
AVMutableVideoCompositionLayerInstruction *videoCompositionLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoAssetTrack];
// The affine transform works in iOS screen coordinates: +x points right, +y points down.
CGAffineTransform transform = [self videoAssetTrackTransform:videoAssetTrack];
[videoCompositionLayerInstruction setTransform:transform atTime:kCMTimeZero];
// Add the layer instruction to the instruction, then add the instruction to the video composition.
videoCompositionInstruction.layerInstructions = [NSArray arrayWithObject:videoCompositionLayerInstruction];
videoComposition.instructions = [NSArray arrayWithObject: videoCompositionInstruction];
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
#warning When exporting a composition that contains both video and audio with any preset other than AVAssetExportPresetPassthrough, the video track must be added to the composition first;
#warning adding the audio track first makes AVAssetExportSession fail with error -11841.
AVMutableCompositionTrack *videoCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *error = nil;
[videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero error:&error];
if (error) {
NSLog(@"error is %@", error);
return;
}
error = nil;
AVMutableCompositionTrack *audioCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:audioAssetTrack atTime:kCMTimeZero error:&error];
if (error) {
NSLog(@"error is %@", error);
return;
}
NSLog(@"the assetDuration is %lld", asset.duration.value/asset.duration.timescale);
AVAssetExportSession *assetExportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality] ;
assetExportSession.shouldOptimizeForNetworkUse = YES;
assetExportSession.videoComposition = videoComposition;
assetExportSession.outputURL = dstFileURL;
assetExportSession.outputFileType = AVFileTypeMPEG4;
__weak AVAssetExportSession *weakAssetExportSession = assetExportSession;
__weak typeof(self) weakSelf = self;
[assetExportSession exportAsynchronouslyWithCompletionHandler:^
{
if ([weakAssetExportSession status] != AVAssetExportSessionStatusCompleted) {
NSLog(@"the error is %@", [weakAssetExportSession error]);
NSLog(@"the status is %ld", (long)[weakAssetExportSession status]);
NSLog(@"the outPutPath is %@", [weakAssetExportSession.outputURL absoluteString]);
NSLog(@"the error is %@", [weakAssetExportSession error].userInfo);
}
dispatch_async(dispatch_get_main_queue(), ^{
#warning Do not read weakAssetExportSession.outputURL here: it is sometimes nil even though the exported file exists on disk, so pass dstFileURL through instead.
[weakSelf exportDidFinish:dstFileURL];
});
}];
}
- (CGAffineTransform)videoAssetTrackTransform:(AVAssetTrack *)videoAssetTrack {
int degrees = [self degreesFromVideoFileWithVideoAssetTrack:videoAssetTrack];
CGAffineTransform transform = CGAffineTransformIdentity;
if (degrees != 0) {
CGAffineTransform translateToCenter = CGAffineTransformIdentity;
if (degrees == 90) {
// Rotate 90° clockwise.
translateToCenter = CGAffineTransformMakeTranslation(videoAssetTrack.naturalSize.height, 0.0);
transform = CGAffineTransformRotate(translateToCenter, M_PI_2);
} else if (degrees == 180) {
// Rotate 180° clockwise.
translateToCenter = CGAffineTransformMakeTranslation(videoAssetTrack.naturalSize.width, videoAssetTrack.naturalSize.height);
transform = CGAffineTransformRotate(translateToCenter, M_PI);
} else if (degrees == 270) {
// Rotate 270° clockwise.
translateToCenter = CGAffineTransformMakeTranslation(0.0, videoAssetTrack.naturalSize.width);
transform = CGAffineTransformRotate(translateToCenter, M_PI_2 + M_PI);
} else if (degrees == -180) {
// Mirror the video about the x-axis (flip vertically).
// The affine transform works in iOS screen coordinates: +x points right, +y points down.
#if 1
//transform = CGAffineTransformTranslate(transform, videoAssetTrack.naturalSize.width, videoAssetTrack.naturalSize.height);
//transform = CGAffineTransformRotate(transform, 90/180.0f*M_PI); // rotate 90°
//transform = CGAffineTransformScale(transform, 1.0, -1.0); // flip the video vertically
//transform = CGAffineTransformScale(transform, -1.0, 1.0); // flip the video horizontally
//transform = CGAffineTransformScale(transform, 1.0, 1.0); // keep the original size
// Original video:
// ___
// | |
// | |
// -------------------- +x
// |
// |
// |
// |
// |
// |
// |
// +y
//transform = CGAffineTransformScale(transform, 1.0, -1.0); // flip the video vertically
// -------------------- +x
// | | |
// | |___|
// |
// |
// |
// |
// |
// +y
//transform = CGAffineTransformTranslate(transform, 0, -videoAssetTrack.naturalSize.height); // translate the video back to its original position
// ___
// | |
// | |
// -------------------- +x
// |
// |
// |
// |
// |
// |
// |
// +y
transform = CGAffineTransformScale(transform, 1.0, -1.0); // flip the video vertically
transform = CGAffineTransformTranslate(transform, 0, -videoAssetTrack.naturalSize.height);
#else
transform = videoAssetTrack.preferredTransform;
transform = CGAffineTransformTranslate(transform, 0, -videoAssetTrack.naturalSize.height);
#endif
}
}
#if 0 // cropVideo
//Here we shift the viewing square up to the TOP of the video so that only the top portion is visible
CGAffineTransform t1 = CGAffineTransformMakeTranslation(videoAssetTrack.naturalSize.height, 0 );
//Use this code if you want the viewing square to be in the middle of the video
//CGAffineTransform t1 = CGAffineTransformMakeTranslation(videoAssetTrack.naturalSize.height, -(videoAssetTrack.naturalSize.width - videoAssetTrack.naturalSize.height) /2 );
//Make sure the square is portrait
transform = CGAffineTransformRotate(t1, M_PI_2);
#endif
return transform;
}
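// For reference (added): CGAffineTransformMakeRotation(θ) yields a = cos θ, b = sin θ, c = -sin θ, d = cos θ,
// so a portrait recording (θ = 90°) carries preferredTransform values a = 0, b = 1, c = -1, d = 0,
// which the method below maps to 90.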
- (int)degreesFromVideoFileWithVideoAssetTrack:(AVAssetTrack *)videoAssetTrack {
int degrees = 0;
CGAffineTransform t = videoAssetTrack.preferredTransform;
if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
// Portrait
degrees = 90;
} else if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
// PortraitUpsideDown
degrees = 270;
} else if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
// LandscapeRight
degrees = 0;
} else if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
// LandscapeLeft
degrees = 180;
} else if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
// Mirrored about the x-axis
degrees = -180;
}
return degrees;
}
- (void)exportDidFinish:(NSURL *)fileURL {
NSLog(@"fileURL is %@", fileURL);
AVURLAsset* asset = [AVURLAsset URLAssetWithURL:fileURL options:nil];
AVPlayerItem * newPlayerItem = [AVPlayerItem playerItemWithAsset:asset];
self.mPlayer = [AVPlayer playerWithPlayerItem:newPlayerItem];
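// Note (added): the observer registered below is never removed in this demo; in production it should be balanced with -removeObserver:forKeyPath:context: before the player is released.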
[self.mPlayer addObserver:self forKeyPath:@"status" options:0 context:AVPlayerDemoPlaybackViewControllerStatusObservationContext];
}
- (void)observeValueForKeyPath:(NSString *)path ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if (context == AVPlayerDemoPlaybackViewControllerStatusObservationContext) {
if (self.mPlayer.status == AVPlayerStatusReadyToPlay) {
[self.mPlaybackView setPlayer:self.mPlayer];
[self.mPlayer play];
}
} else {
[super observeValueForKeyPath:path ofObject:object change:change context:context];
}
}
- (AVPlayerDemoPlaybackView *)mPlaybackView{
if (_mPlaybackView == nil) {
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
[audioSession setCategory:AVAudioSessionCategoryPlayback error:nil];
[audioSession setActive:YES error:nil]; // Ensures audio is audible even when the hardware silent switch is on.
_mPlaybackView = [[AVPlayerDemoPlaybackView alloc] initWithFrame:self.view.bounds];
[self.view addSubview:_mPlaybackView];
}
return _mPlaybackView;
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
@end
//
// AVPlayerDemoPlaybackView.h
// test_crop_rotation_video_01
//
/*
Copyright (C) 2010-2011 Apple Inc. All Rights Reserved.
*/
#import <UIKit/UIKit.h>
@class AVPlayer;
@interface AVPlayerDemoPlaybackView : UIView
@property (nonatomic, retain) AVPlayer* player;
- (void)setPlayer:(AVPlayer*)player;
- (void)setVideoFillMode:(NSString *)fillMode;
@end
//
// AVPlayerDemoPlaybackView.m
// test_crop_rotation_video_01
//
/*
Copyright (C) 2010-2011 Apple Inc. All Rights Reserved.
*/
/* ---------------------------------------------------------
** To play the visual component of an asset, you need a view
** containing an AVPlayerLayer layer to which the output of an
** AVPlayer object can be directed. You can create a simple
** subclass of UIView to accommodate this. Use the view’s Core
** Animation layer (see the 'layer' property) for rendering.
** This class, AVPlayerDemoPlaybackView, is a subclass of UIView
** that is used for this purpose.
** ------------------------------------------------------- */
#import "AVPlayerDemoPlaybackView.h"
#import <AVFoundation/AVFoundation.h>
@implementation AVPlayerDemoPlaybackView
+ (Class)layerClass
{
return [AVPlayerLayer class];
}
- (AVPlayer*)player
{
return [(AVPlayerLayer*)[self layer] player];
}
- (void)setPlayer:(AVPlayer*)player
{
[(AVPlayerLayer*)[self layer] setPlayer:player];
}
/* Specifies how the video is displayed within a player layer’s bounds.
(AVLayerVideoGravityResizeAspect is default) */
- (void)setVideoFillMode:(NSString *)fillMode
{
AVPlayerLayer *playerLayer = (AVPlayerLayer*)[self layer];
playerLayer.videoGravity = fillMode;
}
@end
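// Usage sketch (added, not part of the original files): one way to wire the playback view by hand,
// assuming a view controller that already owns a configured AVPlayer in self.mPlayer.
// AVLayerVideoGravityResizeAspectFill is a standard AVFoundation video-gravity constant.
//
// AVPlayerDemoPlaybackView *playbackView = [[AVPlayerDemoPlaybackView alloc] initWithFrame:self.view.bounds];
// [playbackView setVideoFillMode:AVLayerVideoGravityResizeAspectFill];
// [playbackView setPlayer:self.mPlayer];
// [self.view addSubview:playbackView];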