iOS /OC之视频、音频、相机、相册综合

*******视频的三种录制方式:https://www.jianshu.com/p/fe00883ad3d2

******视频暂停、多视频合成:https://www.jianshu.com/p/174bb4f539cc

******视频合成与混音:https://www.jianshu.com/p/3c585899c455

******保存图片和视频到本地相册:https://www.jianshu.com/p/dbdec1ac1be2

******视频剪切、旋转、添加音频、添加水印、视频导出:https://www.jianshu.com/p/5433143cccd8

******视频压缩:https://www.jianshu.com/p/b7061aae1e86(可以更改输出的格式,iOS录制的是mov格式,输出时候可以更改成MP4格式)

******视频获取单帧和全部帧:https://blog.csdn.net/u012265444/article/details/80020465、(单帧)https://blog.csdn.net/u011146511/article/details/51232690

******相近的一些参数设置:https://www.jianshu.com/p/06ed571fb3b5

*****数码相机中一些常用的名词:https://blog.csdn.net/fxfffp/article/details/51282053

————————————————常用的属性和方法解释——————————

*******视频处理的时间CMTime:https://blog.csdn.net/fengzhixinfei/article/details/80729346、https://www.jianshu.com/p/d6f9d7e493b6

 

——————————————————————————————————————

AVFOUNDATION库中的类的作用:

AVAsset:素材库里的素材; 
AVAssetTrack:素材的轨道; 
AVMutableComposition :一个用来合成视频的工程文件; 
AVMutableCompositionTrack :工程文件中的轨道,有音频轨、视频轨等,里面可以插入各种对应的素材

AVMutableVideoComposition:用来生成video的组合指令,包含多段instruction。可以决定最终视频的尺寸,裁剪需要在这里进行; 
AVMutableVideoCompositionInstruction:一个指令(视频组合指令),决定一个timeRange内每个轨道的状态,包含多个layerInstruction; 
AVMutableVideoCompositionLayerInstruction:视频图层指令,在一个指令的时间范围内,某个轨道的状态; 

AVAssetExportSession:配置渲染参数并渲染。

  • AVPlayer视频播放类,本身不显示视频,需创建一个AVPlayerLayer层,添加到视图
  • AVAssetTrack资源轨道,包括音频轨道和视频轨道
  • AVAsset媒体信息
  • AVURLAsset根据URL路径创建的媒体信息
  • AVPlayerItem媒体资源管理对象,管理视频的基本信息和状态
  • AVMutableVideoCompositionInstruction视频操作指令
  • AVMutableVideoCompositionLayerInstruction视频轨道操作指令,需要添加到AVMutableVideoCompositionInstruction
  • AVMutableAudioMixInputParameters音频操作参数
  • AVMutableComposition包含多个轨道的媒体信息,可以添加、删除轨道
  • AVMutableVideoComposition视频操作指令集合


==============================自定义相机实现拍照和录像===============================

注意要在 Info.plist 中配置相机、麦克风等权限,把下面的两个类实现就可以了。

/**
 File-system helpers for the app sandbox: Documents / Library / Caches
 locations (as both NSURL and path string), iCloud-backup exclusion, and
 free-disk-space lookup. All methods are class methods on NSFileManager.
 */

#import <Foundation/Foundation.h>

@interface NSFileManager (Paths)

/**
 Get URL of Documents directory.
 
 @return Documents directory URL.
 */
+ (NSURL *)documentsURL;

/**
 Get path of Documents directory.
 
 @return Documents directory path.
 */
+ (NSString *)documentsPath;

/**
 Get URL of Library directory.
 
 @return Library directory URL.
 */
+ (NSURL *)libraryURL;

/**
 Get path of Library directory.
 
 @return Library directory path.
 */
+ (NSString *)libraryPath;

/**
 Get URL of Caches directory.
 
 @return Caches directory URL.
 */
+ (NSURL *)cachesURL;

/**
 Get path of Caches directory.
 
 @return Caches directory path.
 */
+ (NSString *)cachesPath;

/**
 Adds a special filesystem flag to a file to avoid iCloud backup it.
 
 @param path Path to a file to set an attribute.
 @return YES if the attribute was set successfully, NO otherwise.
 */
+ (BOOL)addSkipBackupAttributeToFile:(NSString *)path;

/**
 Get available disk space.
 
 @return An amount of available disk space in Megabytes.
 */
+ (double)availableDiskSpace;

@end


******
/**
 Sandbox path helpers — implementation. See NSFileManager+Paths.h for the
 public contract.
 NOTE(review): the private helpers URLForDirectory:/pathForDirectory: are
 unprefixed category methods on a framework class; a selector collision with
 another category would be silent. Consider a `lyb_` prefix.
 */

#import "NSFileManager+Paths.h"
#include <sys/xattr.h>

@implementation NSFileManager (Paths)

// Shared helper: URL of a user-domain search-path directory.
+ (NSURL *)URLForDirectory:(NSSearchPathDirectory)directory
{
    return [self.defaultManager URLsForDirectory:directory inDomains:NSUserDomainMask].lastObject;
}

// Shared helper: path string of a user-domain search-path directory.
// firstObject instead of [0] so an (unexpected) empty result returns nil
// rather than throwing an out-of-bounds exception.
+ (NSString *)pathForDirectory:(NSSearchPathDirectory)directory
{
    return NSSearchPathForDirectoriesInDomains(directory, NSUserDomainMask, YES).firstObject;
}

+ (NSURL *)documentsURL
{
    return [self URLForDirectory:NSDocumentDirectory];
}

+ (NSString *)documentsPath
{
    return [self pathForDirectory:NSDocumentDirectory];
}

+ (NSURL *)libraryURL
{
    return [self URLForDirectory:NSLibraryDirectory];
}

+ (NSString *)libraryPath
{
    return [self pathForDirectory:NSLibraryDirectory];
}

+ (NSURL *)cachesURL
{
    return [self URLForDirectory:NSCachesDirectory];
}

+ (NSString *)cachesPath
{
    return [self pathForDirectory:NSCachesDirectory];
}

+ (BOOL)addSkipBackupAttributeToFile:(NSString *)path
{
    // Idiom fix: use the convenience constructor instead of dot-syntax on
    // +alloc ([NSURL.alloc initFileURLWithPath:]), which abuses property
    // syntax for a non-property method.
    NSURL *fileURL = [NSURL fileURLWithPath:path];
    return [fileURL setResourceValue:@YES forKey:NSURLIsExcludedFromBackupKey error:nil];
}

+ (double)availableDiskSpace
{
    NSDictionary *attributes = [self.defaultManager attributesOfFileSystemForPath:self.documentsPath error:nil];
    
    // 0x100000 bytes = 1 MiB. Messaging a nil dictionary yields 0 on failure.
    return [attributes[NSFileSystemFreeSize] unsignedLongLongValue] / (double)0x100000;
}
@end

=============
/**
 Capture mode for the camera screen (what the controller is being used for).
 Converted from a bare `typedef enum` to NS_ENUM: fixed underlying type,
 switch-exhaustiveness warnings, and clean Swift bridging. The constant
 names and values are unchanged, so all existing callers still compile.
 */
typedef NS_ENUM(NSInteger, LYBPublishType) {
    LYBPublishTypeCameralPhoto, // take a photo with the camera
    LYBPublishTypeAlbumPhoto,   // pick a photo from the album
    LYBPublishTypeVideo,        // record a video
};

*******
/***
 Custom camera controller: takes photos or records video and stores the
 result. Requires the NSFileManager+Paths category for sandbox paths.
 Usage:
 LYBCameralController *cameravc=[[LYBCameralController alloc]init];
 cameravc.publishType= LYBPublishTypeVideo;
 [self.navigationController pushViewController:cameravc animated:YES];
 */

#import <UIKit/UIKit.h>

@interface LYBCameralController : UIViewController

// Maximum recording duration in seconds; values <= 0 fall back to 10.
@property(nonatomic,assign)CGFloat totalTime;

@property(nonatomic,assign) LYBPublishType publishType;// capture mode (photo vs. video); the enum is declared in the .pch

@end

******
/***
 自定义相机拍照,录像,存储
 */

#import "LYBCameralController.h"

#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>


// Block run on a capture device while it is locked for configuration.
typedef void(^PropertyChangeBlock) (AVCaptureDevice * captureDevice);

@interface LYBCameralController ()<UIGestureRecognizerDelegate,AVCaptureFileOutputRecordingDelegate>

{
    
    NSTimer *countTimer;             // drives the recording progress bar
    
    AVCaptureSession *_captureSession;           // capture graph (inputs + outputs)
    
    AVCaptureDeviceInput *_captureDeviceInput;   // current camera input (front or back)
    
    AVCaptureDevice * _audioCaptureDevice;       // microphone device
    
    AVCaptureMovieFileOutput *_captureMovieFileOutPut;   // movie-file recording output
    
    AVCaptureStillImageOutput *_captureStillImageOutput; // still-photo (JPEG) output
    
    AVCaptureVideoPreviewLayer *_captureVideoPreviewLayer; // live camera preview layer
 
    
}

@property (assign,nonatomic) UIBackgroundTaskIdentifier backgroundTaskIdentifier;// background task kept alive while a recording finishes

@property(nonatomic,strong)UIImageView *showImageView;   // shows the captured still photo

@property(nonatomic,strong)UIView *progressView;         // recording progress bar

@property(nonatomic,assign)CGFloat minProgressValue;     // bar growth per timer tick

@property(nonatomic,assign)CGFloat currentVideoTime;     // seconds recorded so far

@property(nonatomic,strong)UIButton *flashButton;        // nav-bar torch toggle

@property(nonatomic,weak)UIView *cameralView;            // preview container view

@property(nonatomic,weak)UIButton *takeButton;           // shutter button

@property(nonatomic,weak)UIButton *deleteButton;         // discard current capture

@property(nonatomic,weak)UIButton *saveButton;           // save/confirm current capture

@property(nonatomic,strong)UIButton *tipButton;          // "record at least 3s" bubble

@property(nonatomic,assign)CGFloat beginGestureScale;    // zoom at pinch start

@property(nonatomic,assign)CGFloat effectiveScale;       // current preview zoom

@property(nonatomic,strong)NSMutableArray *videoUrlsArray;// file URLs of recorded segments

@property(nonatomic,strong)NSURL *outputFileURL;         // URL of the last finished segment

@end

static CGFloat timeinterval = 0.05; // progress-timer tick, in seconds
static CGFloat navH = 64;           // navigation + status bar height, in points
static CGFloat cameralViewH = 200;  // preview height; recomputed in setupUI
@implementation LYBCameralController

// Final cleanup: deregister every notification observer this controller added.
- (void)dealloc{
    
    [self removeNotification];
}
// Initial setup: defaults, capture graph, navigation bar, layout; then
// starts the live session.
- (void)viewDidLoad {
    [super viewDidLoad];

   
    // Default to a 10-second maximum clip when the caller did not set one.
    if (self.totalTime <= 0.0) {
        
        self.totalTime = 10.0;
    }
    self.currentVideoTime = 0.0;
    self.effectiveScale = self.beginGestureScale = 1.0f;
    // Bar growth per tick, sized so the bar spans the full screen width
    // (WIDTH is a project-wide macro) exactly when totalTime elapses.
    self.minProgressValue = WIDTH * timeinterval / self.totalTime;
    
    [self setupCameral];
   
    [self setupNav];

    [self setupUI];
    

    // NOTE(review): the session is stopped in viewDidDisappear: but only
    // started here, so the preview stays frozen after navigating away and
    // back — consider restarting it in viewWillAppear:.
    [_captureSession startRunning];
    
}
// Stop the capture session whenever the controller leaves the screen.
- (void)viewDidDisappear:(BOOL)animated{
    
    // Bug fix: forward the actual `animated` flag to super instead of the
    // hard-coded YES the original passed.
    [super viewDidDisappear:animated];
    
    if (_captureSession) {
        [_captureSession stopRunning];
    }
}
// Builds the navigation bar: cancel on the left, a mode-dependent title,
// and flash + front/back-camera toggle items on the right.
- (void)setupNav{
    
    UIButton *cancelButton = [UIButton buttonWithType:UIButtonTypeSystem];
    [cancelButton setTitle:@"取消" forState:UIControlStateNormal];
    [cancelButton setTitleColor:[UIColor colorWithHexString:@"#333333"] forState:UIControlStateNormal];
    [cancelButton addTarget:self action:@selector(cancelClick:) forControlEvents:UIControlEventTouchUpInside];
    cancelButton.titleLabel.font = [UIFont systemFontOfSize:17];
    [cancelButton sizeToFit];
    self.navigationItem.leftBarButtonItem = [[UIBarButtonItem alloc] initWithCustomView:cancelButton];
    
    // Title reads "record" in video mode, "take photo" otherwise.
    self.navigationItem.title = self.publishType ==  LYBPublishTypeVideo ? @"拍摄" : @"拍照";

    
    // flashitemWithimage:... / itemWithimage:... are project UIBarButtonItem
    // category helpers — presumably they wrap a UIButton in a bar item;
    // TODO confirm their contract.
    UIBarButtonItem *flashItem = [UIBarButtonItem flashitemWithimage:[UIImage imageNamed:@"icon_flash on_nor"] selImage:[UIImage imageNamed:@"icon_flash on_sel"] target:self action:@selector(flashClick:)];
    
    // Keep a handle to the flash button so the torch methods can swap its icons.
    self.flashButton = (UIButton*)flashItem.customView;
   
    UIBarButtonItem *cameralFrontBackItem = [UIBarButtonItem itemWithimage:[UIImage imageNamed:@"icon_turn_nor"] selImage:[UIImage imageNamed:@"icon_turn_sel"] target:self action:@selector(cameralChangeFrontBackClick:)];

    self.navigationItem.rightBarButtonItems = @[cameralFrontBackItem,flashItem];
    
}

// Lays out the screen: live camera preview on top (2/3 of the usable
// height) and a white control strip with shutter / delete / save buttons
// below. Video mode additionally gets a progress bar and a tip bubble.
- (void)setupUI{
    
    self.view.backgroundColor  = [UIColor whiteColor];
    
    // Preview occupies two thirds of the area under the navigation bar.
    // (HEIGHT/WIDTH are project-wide screen-size macros.)
    cameralViewH = (HEIGHT - navH) * 2 / 3.0;

    UIView *cameralView = [UIView new];
    cameralView.frame = CGRectMake(0, navH, WIDTH, cameralViewH);
    [self.view addSubview:cameralView];
    self.cameralView = cameralView;
    cameralView.backgroundColor = [UIColor blackColor];
    
    
    // Preview layer that shows the live camera feed.
    _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:_captureSession];
    
    // CALayer * layer = self.cameralView.layer;
    self.cameralView.layer.masksToBounds = YES;
    
    _captureVideoPreviewLayer.frame = self.cameralView.bounds;
    
    // Fill mode: scale to fill the preview, cropping as needed.
    _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    
    // Insert the preview layer below any subviews of the container.
    [self.cameralView.layer insertSublayer:_captureVideoPreviewLayer atIndex:0];
    
    [self setUpAdjustFoucusPinGesture];
    
    // Hidden image view that later displays a captured still photo.
    [self.cameralView addSubview:self.showImageView];
    self.showImageView.frame = cameralView.bounds;
    self.showImageView.hidden = YES;
    
    // White control strip below the preview. (.bottom/.top/.left/.size are
    // project frame-accessor category properties.)
    UIView *controlView = [UIView new];
    controlView.backgroundColor = [UIColor whiteColor];
    controlView.frame = CGRectMake(0, cameralView.bottom, WIDTH, HEIGHT - cameralView.bottom);
    [self.view addSubview:controlView];
    
    UIButton *takeButton = [UIButton buttonWithType:UIButtonTypeCustom];
    [takeButton setImage:[UIImage imageNamed:@"icon_shot_nor"] forState:UIControlStateNormal];
    [takeButton setImage:[UIImage imageNamed:@"icon_shot_sel"] forState:UIControlStateHighlighted];
    //[takeButton setImage:[UIImage imageNamed:@"icon_shot_sel"] forState:UIControlStateHighlighted];
    if (self.publishType ==  LYBPublishTypeVideo) {// video mode
        
        [takeButton addTarget:self action:@selector(takeDown:) forControlEvents:UIControlEventTouchDown];// fires on press
        [takeButton addTarget:self action:@selector(takeCancel:) forControlEvents:UIControlEventTouchUpInside];// fires on release

    }
    else{// photo mode
        
        [takeButton addTarget:self action:@selector(takeClick:) forControlEvents:UIControlEventTouchUpInside];
    }
    [controlView addSubview:takeButton];
    
    self.takeButton = takeButton;
    
    takeButton.size = CGSizeMake(78, 78);
    takeButton.centerX = controlView.centerX;
    takeButton.top = 75;
    
    
    UIButton *deleteButton = [self createImageButtonWithImage:[UIImage imageNamed:@"icon_del_nor"]       selImage:[UIImage imageNamed:@"icon_del_sel"]  clickSelector:@selector(deleteClick:)];
    deleteButton.hidden = YES;
    [controlView addSubview:deleteButton];
    
    self.deleteButton = deleteButton;
    
    
    deleteButton.size = CGSizeMake(54, 54);
    deleteButton.left = 34;
    deleteButton.centerY = takeButton.centerY;
    
     UIButton *saveButton = [self createImageButtonWithImage:[UIImage imageNamed:@"icon_choice"] selImage:nil clickSelector:@selector(saveClick:)];
    saveButton.hidden = YES;
    [controlView addSubview:saveButton];

    self.saveButton = saveButton;
    
    saveButton.size = deleteButton.size;
    saveButton.left = controlView.width - 34 - saveButton.width;
    saveButton.centerY = takeButton.centerY;

    // Video-only chrome: progress bar along the preview's bottom edge and
    // the "record at least 3 seconds" bubble above it.
    if (self.publishType ==  LYBPublishTypeVideo) {
        
        [cameralView addSubview:self.progressView];
        [cameralView bringSubviewToFront:self.progressView];
        //self.progressView.width = 100;
        self.progressView.left = 0;
        self.progressView.top = cameralViewH - self.progressView.height;
        
        [cameralView addSubview:self.tipButton];
        [cameralView bringSubviewToFront:self.tipButton];
        
        self.tipButton.left = 99;
        self.tipButton.top = cameralViewH - self.tipButton.height - 20;
        self.tipButton.hidden = YES;
    }
    
}

// Attaches a pinch recognizer to the preview container so the user can
// zoom the camera feed.
- (void)setUpAdjustFoucusPinGesture{
    
    UIPinchGestureRecognizer *pinchRecognizer =
        [[UIPinchGestureRecognizer alloc] initWithTarget:self
                                                  action:@selector(adjustFoucusPinchGesture:)];
    pinchRecognizer.delegate = self;
    [self.cameralView addGestureRecognizer:pinchRecognizer];
}
// Pinch handler: zooms the preview layer by scaling it. The effective scale
// is the scale at gesture start times the live pinch scale, clamped to
// [1.0, videoMaxScaleAndCropFactor].
- (void)adjustFoucusPinchGesture:(UIPinchGestureRecognizer*)pinGesture{
    
    // Only zoom when every touch of the pinch lies on the preview layer.
    BOOL allTouchesAreOnThePreviewLayer = YES;
    NSUInteger numTouches = [pinGesture numberOfTouches], i;
    for ( i = 0; i < numTouches; ++i ) {
        CGPoint location = [pinGesture locationOfTouch:i inView:self.cameralView];
        // Convert from the container's coordinate space (the preview layer's
        // superlayer) into the preview layer's own space.
        CGPoint convertedLocation = [_captureVideoPreviewLayer convertPoint:location fromLayer:_captureVideoPreviewLayer.superlayer];
        if ( ! [_captureVideoPreviewLayer containsPoint:convertedLocation] ) {
            allTouchesAreOnThePreviewLayer = NO;
            break;
        }
    }
    
    if ( allTouchesAreOnThePreviewLayer ) {
        
        // Never zoom out past 1x.
        self.effectiveScale = self.beginGestureScale * pinGesture.scale;
        if (self.effectiveScale < 1.0){
            self.effectiveScale = 1.0;
        }
        
      //  NSLog(@"%f-------------->%f------------recognizerScale%f",self.effectiveScale,self.beginGestureScale,pinGesture.scale);
        
        // Clamp to the hardware's maximum scale-and-crop factor.
        CGFloat maxScaleAndCropFactor = [[_captureStillImageOutput connectionWithMediaType:AVMediaTypeVideo] videoMaxScaleAndCropFactor];
        
        NSLog(@"%f---%f",maxScaleAndCropFactor,self.effectiveScale);
        if (self.effectiveScale > maxScaleAndCropFactor)
            self.effectiveScale = maxScaleAndCropFactor;
        
        // Animate the zoom with a very short transaction.
        [CATransaction begin];
        [CATransaction setAnimationDuration:.025];
        [_captureVideoPreviewLayer setAffineTransform:CGAffineTransformMakeScale(self.effectiveScale, self.effectiveScale)];
        [CATransaction commit];
        
    }

}
#pragma mark gestureRecognizer delegate
// Record the current zoom as the baseline whenever a new pinch begins.
- (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer
{
    BOOL isPinch = [gestureRecognizer isKindOfClass:[UIPinchGestureRecognizer class]];
    if (isPinch) {
        self.beginGestureScale = self.effectiveScale;
    }
    return YES;
}

// Builds the AVCaptureSession graph: back camera + microphone in,
// still-image (JPEG) and movie-file outputs out.
- (void)setupCameral{
    
    _captureSession = [[AVCaptureSession alloc] init];
    // Prefer a 720p preset when the hardware supports it.
    if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
        _captureSession.sessionPreset= AVCaptureSessionPreset1280x720;
    }
    // Video input: back camera.
    AVCaptureDevice * captureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
    if (!captureDevice) {
        DLog(@"取得后置摄像头时出现问题。");
        return;
    }
    
    NSError * error = nil;
    
    // Audio input: first available microphone.
    AVCaptureDevice * audioCaptureDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
    AVCaptureDeviceInput * audioCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioCaptureDevice error:&error];
    if (error) {
        DLog(@"获得设备输入对象时出错,错误原因:%@",error.localizedDescription);
        return;
    }
    
    _captureMovieFileOutPut = [[AVCaptureMovieFileOutput alloc] init];
    
    _captureDeviceInput = [[AVCaptureDeviceInput alloc]initWithDevice:captureDevice error:&error];
    if (error) {
        DLog(@"取得设备输入对象时出错,错误原因:%@",error.localizedDescription);
        return;
    }
    // Still-image output configured for JPEG.
    _captureStillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary * outputSettings = @{AVVideoCodecKey:AVVideoCodecJPEG};
    [_captureStillImageOutput setOutputSettings:outputSettings];
    
    // Wire the inputs. Bug fix: the audio input now gets its own
    // canAddInput: check instead of piggybacking on the video check.
    if ([_captureSession canAddInput:_captureDeviceInput]) {
        [_captureSession addInput:_captureDeviceInput];
    }
    if ([_captureSession canAddInput:audioCaptureDeviceInput]) {
        [_captureSession addInput:audioCaptureDeviceInput];
    }
    
    // Wire the outputs.
    if ([_captureSession canAddOutput:_captureStillImageOutput]) {
        [_captureSession addOutput:_captureStillImageOutput];
    }
    if ([_captureSession canAddOutput:_captureMovieFileOutPut]) {
        [_captureSession addOutput:_captureMovieFileOutPut];
    }
    
    // Bug fix: an output's connections only exist AFTER it has been added
    // to the session. The original queried connectionWithMediaType: before
    // addOutput:, got nil, and silently lost the stabilization setting.
    AVCaptureConnection * captureConnection = [_captureMovieFileOutPut connectionWithMediaType:AVMediaTypeVideo];
    if ([captureConnection isVideoStabilizationSupported]) {
        captureConnection.preferredVideoStabilizationMode= AVCaptureVideoStabilizationModeAuto;
    }
    
    [self addNotificationToCaptureDevice:captureDevice];

}

#pragma mark - 摄像头相关
// Registers for subject-area-change notifications on the capture device.
-(void)addNotificationToCaptureDevice:(AVCaptureDevice *)captureDevice{
    // Monitoring must be enabled (under a configuration lock) before the
    // notification will ever fire.
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        captureDevice.subjectAreaChangeMonitoringEnabled=YES;
    }];
    // Observe subject-area changes for this specific device.
    [[NSNotificationCenter defaultCenter
     ]addObserver:self selector:@selector(areaChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}
/**
 *  Subject-area-did-change notification callback. Currently a no-op —
 *  a natural hook point for refocus / re-exposure logic.
 *
 *  @param notification The notification object.
 */
-(void)areaChange:(NSNotification *)notification{
   // DLog(@"捕获区域改变...");
}
// Runs `propertyChange` on the current video capture device inside the
// lockForConfiguration / unlockForConfiguration pair AVFoundation requires
// before any device property may be mutated.
- (void)changeDeviceProperty:(PropertyChangeBlock)propertyChange{
    
    AVCaptureDevice *device = [_captureDeviceInput device];
    NSError *lockError;
    if (![device lockForConfiguration:&lockError]) {
        // Lock failed — skip the change silently (matches prior behavior).
        return;
    }
    propertyChange(device);
    [device unlockForConfiguration];
}

// Stop observing subject-area changes for the given capture device.
-(void)removeNotificationFromCaptureDevice:(AVCaptureDevice *)captureDevice{

    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    [center removeObserver:self
                      name:AVCaptureDeviceSubjectAreaDidChangeNotification
                    object:captureDevice];
}

// Unregister this controller from every notification it observes.
-(void)removeNotification{
    
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}


// Returns the camera at the requested position (front/back), or nil when
// no such device exists.
- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)positon{
    
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (device.position == positon) {
            return device;
        }
    }
    return nil;
}

// Starts the recording progress timer (one tick every `timeinterval` s).
// NOTE(review): scheduledTimer... retains its target, so this controller
// is kept alive until removeTimer runs; `fire` also produces one immediate
// tick before the first scheduled interval.
- (void)addTimer{
    
    countTimer = [NSTimer scheduledTimerWithTimeInterval:timeinterval target:self selector:@selector(onTimer:) userInfo:nil repeats:YES];
    [countTimer fire];

}
// Invalidate and discard the progress timer (safe to call when none exists).
- (void)removeTimer{
    
    if (countTimer != nil) {
        [countTimer invalidate];
        countTimer = nil;
    }
}

// Timer tick (every `timeinterval` seconds while recording): advances the
// elapsed-time counter, grows the progress bar, and toggles the tip /
// delete / save buttons around the minimum-3-second rule.
- (void)onTimer:(NSTimer*)timer{
    
    // Time is up: stop recording and bail out. The early return is a bug
    // fix — the original fell through and kept advancing the counter and
    // growing the progress bar past full width after stopping.
    if (self.currentVideoTime >= self.totalTime) {
        
        [self removeTimer];
        
        [_captureMovieFileOutPut stopRecording];
        return;
    }

    self.currentVideoTime += timeinterval;
    // minProgressValue per tick => the bar reaches full width at totalTime.
    CGFloat progressWidth = self.progressView.width + self.minProgressValue;
    self.progressView.width = progressWidth;
    
    if (self.currentVideoTime < 3.0) {
        // Under 3 seconds: show the "record at least 3s" tip; saving not
        // allowed yet. (currentVideoTime is always > 0 here because it was
        // just incremented, so the original's dead <= 0 branch is removed.)
        self.tipButton.hidden = NO;
        self.deleteButton.hidden = NO;
        self.saveButton.hidden = YES;

    }
    else {
        // 3 seconds or more: the clip is long enough to save.
        self.tipButton.hidden = YES;
        self.saveButton.hidden = NO;

    }
    
}
// Torch toggle. The current torch state is inferred from which icon the
// button presently shows: the "flash on" artwork means the torch is off.
- (void)flashClick:(UIButton*)button{
    
    UIImage *currentNormalImage = [button imageForState:UIControlStateNormal];
    NSData *currentImageData = UIImagePNGRepresentation(currentNormalImage);
    NSData *torchOffImageData = UIImagePNGRepresentation([UIImage imageNamed:@"icon_flash on_nor"]);
    
    BOOL torchCurrentlyOff = [currentImageData isEqualToData:torchOffImageData];
    if (torchCurrentlyOff) {
        // Showing the "turn on" icon => torch is off => switch it on.
        [self setupTorchOn:button];
    } else {
        [self setupTorchOff:button];
    }
}
// Switch the torch on and show the "turn off" artwork on the button.
- (void)setupTorchOn:(UIButton*)button{
    
    UIImage *normalIcon = [UIImage imageNamed:@"icon_flash off_nor"];
    UIImage *highlightedIcon = [UIImage imageNamed:@"icon_flash off_sel"];
    [button setImage:normalIcon forState:UIControlStateNormal];
    [button setImage:highlightedIcon forState:UIControlStateHighlighted];
    
    [self setTorchMode:AVCaptureTorchModeOn];
}
// Switch the torch off and show the "turn on" artwork on the button.
- (void)setupTorchOff:(UIButton*)button{
    
    UIImage *normalIcon = [UIImage imageNamed:@"icon_flash on_nor"];
    UIImage *highlightedIcon = [UIImage imageNamed:@"icon_flash on_sel"];
    [button setImage:normalIcon forState:UIControlStateNormal];
    [button setImage:highlightedIcon forState:UIControlStateHighlighted];
    
    [self setTorchMode:AVCaptureTorchModeOff];
}
// Toggles between the front and back cameras, keeping the session in a
// valid state even when the new input cannot be created or added.
- (void)cameralChangeFrontBackClick:(UIButton*)button{
    
    AVCaptureDevice *currentDevice=[_captureDeviceInput device];
    AVCaptureDevicePosition currentPosition=[currentDevice position];
    // Unspecified or front => switch to back; back => switch to front.
    AVCaptureDevicePosition toChangePosition=AVCaptureDevicePositionFront;
    if (currentPosition==AVCaptureDevicePositionUnspecified||currentPosition==AVCaptureDevicePositionFront) {
        toChangePosition=AVCaptureDevicePositionBack;
    }
    AVCaptureDevice *toChangeDevice=[self getCameraDeviceWithPosition:toChangePosition];
    
    NSError *inputError = nil;
    AVCaptureDeviceInput *toChangeDeviceInput=[[AVCaptureDeviceInput alloc]initWithDevice:toChangeDevice error:&inputError];
    // Bug fix: validate the new input BEFORE removing the old one; the
    // original removed first, which could leave the session with no video
    // input when creation failed (error was passed as nil and ignored).
    if (!toChangeDeviceInput) {
        DLog(@"切换摄像头失败:%@", inputError.localizedDescription);
        return;
    }
    
    // Session reconfiguration must be wrapped in begin/commitConfiguration.
    [_captureSession beginConfiguration];
    [_captureSession removeInput:_captureDeviceInput];
    if ([_captureSession canAddInput:toChangeDeviceInput]) {
        [_captureSession addInput:toChangeDeviceInput];
        _captureDeviceInput=toChangeDeviceInput;
    } else {
        // Could not add the new camera — restore the previous input so the
        // preview keeps working.
        [_captureSession addInput:_captureDeviceInput];
    }
    [_captureSession commitConfiguration];
    
    // Torch state does not carry over between cameras; reset the UI to off.
    [self setupTorchOff:self.flashButton];
}
// Shutter tap in photo mode: capture one still frame, show it in the
// preview image view, reveal delete/save, and lock the shutter button.
- (void)takeClick:(UIButton*)takeButton{
    
    // Capture one frame from the still-image output's video connection.
    AVCaptureConnection *stillConnection=[_captureStillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    [_captureStillImageOutput captureStillImageAsynchronouslyFromConnection:stillConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (!imageDataSampleBuffer) {
            return;
        }
        NSData *jpegData=[AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
        UIImage *capturedImage=[UIImage imageWithData:jpegData];
        
        // Display the shot and switch the controls into "review" state.
        self.showImageView.hidden = NO;
        self.showImageView.image = capturedImage;
        self.deleteButton.hidden = NO;
        self.saveButton.hidden = NO;
        self.takeButton.userInteractionEnabled = NO;
    }];
}
// Hook for handing the captured photo onward (save to the album, or upload
// to a server). Intentionally left empty — implement per product need.
- (void)leaveWithImage:(UIImage*)image{
    
    

    
}
// Touch-down on the shutter in video mode: start a new recording segment
// (or stop, if one is somehow already running).
- (void)takeDown:(UIButton*)takeButton{
    
    // Already at the maximum duration — ignore further presses.
    if (self.currentVideoTime >= self.totalTime) {
        
//        [self.navigationController showErrWithMessage:@"视频已达到最大时长"];
        return;
    }
    
    
    // Video connection of the movie-file output.
    AVCaptureConnection *captureConnection=[_captureMovieFileOutPut connectionWithMediaType:AVMediaTypeVideo];
    
    
    if (![_captureMovieFileOutPut isRecording]) {
        //      self.enableRotation=NO;
        // Keep the recording alive briefly if the app is backgrounded.
        if ([[UIDevice currentDevice] isMultitaskingSupported]) {
            self.backgroundTaskIdentifier=[[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil];
        }
        // Match the recorded video's orientation to the device orientation.
        //captureConnection.videoOrientation=[_captureVideoPreviewLayer connection].videoOrientation;
        if ([captureConnection isVideoOrientationSupported]) {
            captureConnection.videoOrientation = [self getCaptureVideoOrientation];
        }
        // Destination for this (un-merged) segment inside the caches folder.
        NSString *path = [self getVideoFilePath2CacheFolder];
        NSLog(@"录制前的路径001-----%@",path);
        NSURL *fileUrl=[NSURL fileURLWithPath:path];
         NSLog(@"录制前的路径002-----%@",fileUrl);
        [_captureMovieFileOutPut startRecordingToOutputFileURL:fileUrl recordingDelegate:self];

    }
    else{
        
     
        // Already recording: treat the press as a stop.
        [self removeTimer];
        [_captureMovieFileOutPut stopRecording];
    }
    
}
// Finger lifted from the shutter in video mode: finish this segment and
// halt the progress timer.
- (void)takeCancel:(UIButton*)takeButton{
    
    [_captureMovieFileOutPut stopRecording];
    
    [self removeTimer];
}
#pragma mark - 视频输出代理
// Recording actually began: restart the progress timer from a clean state.
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections{
    NSLog(@"开始录制...");
    [self removeTimer];
    [self addTimer];
}


// Segment finished: collect its file URL; when the total allowed duration
// has been reached, merge every collected segment into one video.
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error{
    //  self.enableRotation=YES;
    
    [self removeTimer];
    
    // Surface recording errors instead of silently ignoring them. Per
    // AVFoundation, the file may still be partially usable on error.
    if (error) {
        DLog(@"录制视频出错:%@", error.localizedDescription);
    }
    // Guard restored (it was commented out): without a file URL there is
    // nothing to collect or merge.
    if (!outputFileURL) return;
    
    self.outputFileURL = outputFileURL;
    NSLog(@"录制后outputFileURL的路劲和之前输出的路径是一样的****%@",outputFileURL);
   // [self saveVideo2AlbumWithVideoUrl:outputFileURL];
   
    [self.videoUrlsArray addObject:outputFileURL];
    // Maximum duration reached: merge all segments now.
    if (self.currentVideoTime >= self.totalTime) {

        [self mergeAndExportVideosAtFileURLs:self.videoUrlsArray completionBlock:nil];
    }
}
// Saves a recorded video file to the system photo album and ends the
// background task that was started when recording began. Currently unused.
// NOTE(review): ALAssetsLibrary is long deprecated — migrate to the Photos
// framework (PHPhotoLibrary) if this path is ever revived.
- (void)saveVideo2AlbumWithVideoUrl:(NSURL*)videoFileUrl{
    
    UIBackgroundTaskIdentifier lastBackgroundTaskIdentifier = self.backgroundTaskIdentifier;
    self.backgroundTaskIdentifier=UIBackgroundTaskInvalid;
    ALAssetsLibrary *assetsLibrary=[[ALAssetsLibrary alloc]init];
    [assetsLibrary writeVideoAtPathToSavedPhotosAlbum:videoFileUrl completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
//            [self.navigationController showErrWithMessage:@"保存视频文件出错"];
            DLog(@"保存视频到相簿过程中发生错误,错误信息:%@",error.localizedDescription);
        }
        // End the background task started in takeDown:, if any.
        if (lastBackgroundTaskIdentifier!=UIBackgroundTaskInvalid) {
            [[UIApplication sharedApplication] endBackgroundTask:lastBackgroundTaskIdentifier];
        }
//        [self.navigationController showSuccessWithMessage:@"成功保存视频到相册"];
       
        // Reset the progress bar and the review buttons.
        self.progressView.width = 0.001;
        self.deleteButton.hidden = YES;
        self.saveButton.hidden = YES;
       
       
        DLog(@"成功保存视频到相簿.");
    }];

}

// Deletes every recorded segment file from the caches directory (called
// after a successful merge, or when the user discards the recording) and
// resets the recording UI state.
- (void)deleteVideos
{
    for (NSURL *videoFileURL in self.videoUrlsArray) {
        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
            // Bug fix: use -path instead of stripping "file://" from the
            // absolute string — the string form keeps percent-escapes
            // (e.g. for spaces), so such files were never found/deleted.
            NSString *filePath = videoFileURL.path;
            NSFileManager *fileManager = [NSFileManager defaultManager];
            if ([fileManager fileExistsAtPath:filePath]) {
                NSError *error = nil;
                // Check the BOOL return, not the error pointer.
                if (![fileManager removeItemAtPath:filePath error:&error]) {
                    DLog(@"delete All Video tome%@", error);
                }
            }
        });
    }
    // The URLs were captured by the blocks above, so clearing the array
    // immediately is safe.
    [self.videoUrlsArray removeAllObjects];
    self.progressView.width = 0.001;
    self.deleteButton.hidden = YES;
    self.saveButton.hidden = YES;
    self.currentVideoTime = 0.0;
    self.tipButton.hidden = YES;

}
// Writes the image to the system photo album and hides the review buttons.
- (void)savePhoto2AlbumWithImage:(UIImage*)image{
    
    UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil);
    self.deleteButton.hidden = YES;
    self.saveButton.hidden = YES;
}
// Discards the captured photo preview and re-enables the shutter button.
- (void)deletePhoto{
    
    self.showImageView.hidden = YES;
    self.deleteButton.hidden = YES;
    self.saveButton.hidden = YES;
    
    self.takeButton.userInteractionEnabled = YES;
}

// Applies the torch mode inside a configuration lock, skipping modes the
// current device does not support.
-(void)setTorchMode:(AVCaptureTorchMode)torchMode{
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        BOOL supported = [captureDevice isTorchModeSupported:torchMode];
        if (supported) {
            captureDevice.torchMode = torchMode;
        }
    }];
}

// Delete button. Video mode: stop recording and wipe all recorded
// segments. Photo mode: just reset the preview UI.
- (void)deleteClick:(UIButton*)deleteBtn{
    
    if (self.publishType != LYBPublishTypeVideo) {
        [self deletePhoto];
        return;
    }
    
    [_captureMovieFileOutPut stopRecording];
    [self deleteVideos];
}
// Save button. Video mode: stop recording and merge all recorded segments
// (their URLs were collected in self.videoUrlsArray) into one video.
// Photo mode: hand the photo off and reset the capture UI.
- (void)saveClick:(UIButton*)saveBtn{
    
    if (self.publishType ==  LYBPublishTypeVideo) {
        
        self.currentVideoTime = 0.0;
        [self removeTimer];
        if (_captureMovieFileOutPut.isRecording) {
            [_captureMovieFileOutPut stopRecording];
        }
        // Merge every segment; pause/resume recording is simulated by
        // concatenating the separate segment files.
        [self mergeAndExportVideosAtFileURLs:self.videoUrlsArray completionBlock:^(NSURL *videoUrl) {
          
            // On success, delete the individual segment files from the
            // sandbox and reset the capture controls.
            [self deleteClick:nil];

        }];

    }
    else{
        
        // Hand the photo to the save/upload hook, then reset the UI.
        [self leaveWithImage:self.showImageView.image];
       
        [self deletePhoto];
        
    }

    
}
// Factory for the small image-only control buttons (delete / save).
- (UIButton*)createImageButtonWithImage:(UIImage*)image selImage:(UIImage*)selImage clickSelector:(SEL)selector{
    
    UIButton *imageButton = [UIButton buttonWithType:UIButtonTypeCustom];
    [imageButton setImage:image forState:UIControlStateNormal];
    [imageButton setImage:selImage forState:UIControlStateHighlighted];
    [imageButton addTarget:self action:selector forControlEvents:UIControlEventTouchUpInside];
    [imageButton sizeToFit];
    return imageButton;
}
// Navigation-bar cancel: leave this screen without saving anything.
- (void)cancelClick:(UIButton*)button{
    [self.navigationController popViewControllerAnimated:YES];
}

// Lazily-created preview for the captured still photo.
- (UIImageView *)showImageView{
    
    if (_showImageView == nil) {
        UIImageView *imageView = [UIImageView new];
        imageView.contentMode = UIViewContentModeScaleAspectFill;
        _showImageView = imageView;
    }
    return _showImageView;
}

// Lazily-created recording progress bar (starts at ~zero width; grows on
// every timer tick). BlueThemeColor is a project color macro.
- (UIView *)progressView{
    
    if (_progressView == nil) {
        UIView *bar = [UIView new];
        bar.backgroundColor = BlueThemeColor;
        bar.width = 0.0001;
        bar.height = 5;
        _progressView = bar;
    }
    return _progressView;
}

// Lazily-created "record at least 3 seconds" bubble shown over the preview.
- (UIButton *)tipButton{
    
    if (_tipButton == nil) {
        UIButton *tip = [UIButton new];
        [tip setBackgroundImage:[UIImage imageNamed:@"icon_dialog"] forState:UIControlStateNormal];
        [tip setTitle:@"视频至少拍摄3秒" forState:UIControlStateNormal];
        [tip sizeToFit];
        // Pad the fitted size so the text has breathing room.
        tip.width += 24;
        tip.height += 12;
        tip.layer.cornerRadius = 4;
        tip.layer.masksToBounds = YES;
        _tipButton = tip;
    }
    return _tipButton;
}
// Maps the physical device orientation to a capture-connection video
// orientation. Landscape is swapped (device-left => video-right and vice
// versa); everything else — including upside-down, face-up and face-down —
// maps to plain portrait, matching the original behavior (upside-down
// deliberately avoids AVCaptureVideoOrientationPortraitUpsideDown, which
// produced footage flipped relative to capture).
- (AVCaptureVideoOrientation)getCaptureVideoOrientation {
    UIDeviceOrientation deviceOrientation = [UIDevice currentDevice].orientation;
    
    switch (deviceOrientation) {
        case UIDeviceOrientationLandscapeLeft:
            return AVCaptureVideoOrientationLandscapeRight;
        case UIDeviceOrientationLandscapeRight:
            return AVCaptureVideoOrientationLandscapeLeft;
        default:
            return AVCaptureVideoOrientationPortrait;
    }
}
// Lazily-created list of file URLs, one per recorded video segment.
- (NSMutableArray *)videoUrlsArray{
    
    if (_videoUrlsArray == nil) {
        _videoUrlsArray = [NSMutableArray array];
    }
    return _videoUrlsArray;
}

// Builds a unique, timestamped ".mov" file path inside
// Library/Caches/myDerectory for one recorded (not yet merged) segment,
// creating the directory on first use.
- (NSString*)getVideoFilePath2CacheFolder{
    DLog(@"buddo:%@",[NSBundle mainBundle].bundlePath);
    NSFileManager *fileManager = [NSFileManager defaultManager];

    NSString *cachePath = NSFileManager.cachesPath;// sandbox Caches directory
    NSString *videoPath = [cachePath stringByAppendingPathComponent:@"myDerectory"];// target folder

    BOOL isDir = NO;
    BOOL isDirExist = [fileManager fileExistsAtPath:videoPath isDirectory:&isDir];// does the folder already exist?

    if(!(isDirExist && isDir))
    {
        NSError *dirError = nil;
        BOOL bCreateDir = [fileManager createDirectoryAtPath:videoPath withIntermediateDirectories:YES attributes:nil error:&dirError];
        if(!bCreateDir){
            DLog(@"Failed to create video cache directory: %@", dirError);
        }
        // BUGFIX: the original returned the bare directory path here, so the
        // very first recording was handed a folder instead of a file path.
        // Fall through and always build a timestamped file name.
    }

    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    formatter.dateFormat = @"yyyyMMddHHmmss";
    NSString *nowTimeStr = [formatter stringFromDate:[NSDate dateWithTimeIntervalSinceNow:0.0]];// timestamp used as the file name

    NSString *fileName = [[videoPath stringByAppendingPathComponent:nowTimeStr] stringByAppendingString:@".mov"];// append the extension

    return fileName;
}
//把一段一段的视频合成起来,暂停的效果就是用合成视频来实现的(iOS中没有现成的方法)
// Concatenates the recorded segments at fileURLArray into a single MP4 file
// in the caches directory. Pause/resume recording is implemented by capturing
// separate clips and merging them here (AVFoundation has no built-in pause).
// The completion block is invoked on the main queue with the merged file URL
// on success, or nil on failure, cancellation, or over-length input.
- (void)mergeAndExportVideosAtFileURLs:(NSArray *)fileURLArray completionBlock:(void(^)(NSURL*videoUrl))completionBlock{
    NSError *error = nil;

    CGSize renderSize = CGSizeMake(0, 0);

    NSMutableArray *layerInstructionArray = [[NSMutableArray alloc] init];// one layer instruction per clip
    // The composition ("project file") all tracks are assembled into.
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    // Running insertion cursor; each clip is appended at this time.
    CMTime totalDuration = kCMTimeZero;

    NSMutableArray *assetTrackArray = [[NSMutableArray alloc] init];// first video track of each asset
    NSMutableArray *assetArray = [[NSMutableArray alloc] init];// the source assets themselves
    for (NSURL *fileURL in fileURLArray) {

        AVAsset *asset = [AVAsset assetWithURL:fileURL];// load the clip
        [assetArray addObject:asset];

        NSArray* tmpAry =[asset tracksWithMediaType:AVMediaTypeVideo];// usually exactly one video track per clip
        if (tmpAry.count>0) {
            AVAssetTrack *assetTrack = [tmpAry objectAtIndex:0];
            [assetTrackArray addObject:assetTrack];
            // Width/height are deliberately swapped here: portrait recordings
            // report naturalSize in landscape terms plus a rotation transform.
            renderSize.width = MAX(renderSize.width, assetTrack.naturalSize.height);
            renderSize.height = MAX(renderSize.height, assetTrack.naturalSize.width);
        }
    }
    for (int i = 0; i < [assetArray count] && i < [assetTrackArray count]; i++) {

        AVAsset *asset = [assetArray objectAtIndex:i];
        AVAssetTrack *assetTrack = [assetTrackArray objectAtIndex:i];

        // Append this clip's audio (if it has any) to a new audio track.
        NSArray *dataSourceArray = [asset tracksWithMediaType:AVMediaTypeAudio];
        if (dataSourceArray.count > 0) {
            // BUGFIX: the original passed nil to ofTrack: for silent clips,
            // which violates the API's nonnull contract; only insert when an
            // audio track actually exists.
            AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
            [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                ofTrack:[dataSourceArray objectAtIndex:0]
                                 atTime:totalDuration
                                  error:nil];
        }

        // Append this clip's video to a new video track in the composition.
        AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                            ofTrack:assetTrack
                             atTime:totalDuration
                              error:&error];

        // Layer instruction describing how this clip's track is rendered.
        AVMutableVideoCompositionLayerInstruction *layerInstruciton = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];

        totalDuration = CMTimeAdd(totalDuration, asset.duration);// cursor now sits at the end of this clip

        CGFloat rate = 1.0f;

        CGAffineTransform layerTransform = assetTrack.preferredTransform;

        CGAffineTransform t = assetTrack.preferredTransform;
        if((t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) ||
           (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0)){
            // 90°-rotated clip: renderSize is already oriented correctly.
        }else{
            // NOTE(review): this swaps renderSize once PER non-rotated clip,
            // so two such clips cancel each other out. Preserved as-is, but
            // verify with mixed-orientation input.
            renderSize = CGSizeMake(renderSize.height, renderSize.width);
        }

        layerTransform = CGAffineTransformScale(layerTransform, rate, rate);

        [layerInstruciton setTransform:layerTransform atTime:kCMTimeZero];// apply the clip's rotation/scale
        [layerInstruciton setOpacity:0.0 atTime:totalDuration];// hide this layer once its segment has played out

        [layerInstructionArray addObject:layerInstruciton];
    }

    NSString *path = [self getVideoMergeFilePathStringFromCacheFolder];// output location of the merged file
    NSURL *mergeFileURL = [NSURL fileURLWithPath:path];
    // Composition instruction covering the whole merged timeline.
    AVMutableVideoCompositionInstruction *mainInstruciton = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruciton.timeRange = CMTimeRangeMake(kCMTimeZero, totalDuration);
    // NOTE(review): this compares the raw CMTime value, not seconds, so the
    // effective length limit depends on the clips' timescale — confirm.
    if(totalDuration.value> 60000){
        completionBlock(nil);
        return;
    }
    DLog(@"时长%@",@(totalDuration.value));
    mainInstruciton.layerInstructions = layerInstructionArray;

    // Video composition: holds the instruction list, output frame rate and
    // final render size (cropping/resizing would happen here).
    AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
    mainCompositionInst.instructions = @[mainInstruciton];
    mainCompositionInst.frameDuration = CMTimeMake(1, 60);// 60 fps output
    mainCompositionInst.renderSize = renderSize;

    // Configure the export session and render asynchronously.
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
    exporter.videoComposition = mainCompositionInst;
    exporter.outputURL = mergeFileURL;
    exporter.outputFileType = AVFileTypeMPEG4;// recordings are .mov; export as MP4
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{

            DLog(@"%@",exporter.outputURL.absoluteString);
            switch (exporter.status) {

                case AVAssetExportSessionStatusFailed:
                    NSLog(@"Export failed -> Reason: %@, User Info: %@",
                          exporter.error.localizedDescription,
                          exporter.error.userInfo.description);
                    // BUGFIX: the original never called back on failure,
                    // leaving the caller waiting forever.
                    if (completionBlock) {
                        completionBlock(nil);
                    }
                    break;

                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"Export cancelled");
                    // BUGFIX: also report cancellation to the caller.
                    if (completionBlock) {
                        completionBlock(nil);
                    }
                    break;

                case AVAssetExportSessionStatusCompleted:
                {
                    if (completionBlock) {
                        completionBlock(mergeFileURL);
                    }
                }
                    break;

                default:
                    break;
            }
        });
    }];
}

// Returns a unique, timestamped ".mp4" path inside Library/Caches/myDerectory
// for the merged output, creating the directory if it does not exist yet.
- (NSString *)getVideoMergeFilePathStringFromCacheFolder
{
    NSFileManager *manager = [NSFileManager defaultManager];
    NSString *directory = [NSFileManager.cachesPath stringByAppendingPathComponent:@"myDerectory"];

    BOOL isDirectory = NO;
    BOOL exists = [manager fileExistsAtPath:directory isDirectory:&isDirectory];
    if (!exists || !isDirectory) {
        // Best-effort create; a failure here simply yields an unwritable path,
        // matching the original behavior.
        [manager createDirectoryAtPath:directory
           withIntermediateDirectories:YES
                            attributes:nil
                                 error:nil];
    }

    // Name the file after the current time so successive merges never collide.
    NSDateFormatter *stampFormatter = [[NSDateFormatter alloc] init];
    stampFormatter.dateFormat = @"yyyyMMddHHmmss";
    NSString *stamp = [stampFormatter stringFromDate:[NSDate dateWithTimeIntervalSinceNow:0.0]];

    return [[directory stringByAppendingPathComponent:stamp] stringByAppendingString:@"merge.mp4"];
}
@end

==============================获取系统相册的全部照片或视频===================================

使用:https://www.jianshu.com/p/6bcda6b35801

http://kayosite.com/ios-development-and-detail-of-photo-framework-part-two.html

  • PHAsset: 代表照片库中的一个资源,跟 ALAsset 类似,通过 PHAsset 可以获取和保存资源
  • PHFetchOptions: 获取资源时的参数,可以传 nil,即使用系统默认值
  • PHFetchResult: 表示一系列的资源集合,也可以是相册的集合
  • PHAssetCollection: 表示一个相册或者一个时刻,或者是一个「智能相册」(系统提供的特定的一系列相册,例如:最近删除、视频列表、收藏等等)
  • PHImageManager: 用于处理资源的加载,加载图片的过程带有缓存处理,可以通过传入一个 PHImageRequestOptions 控制资源的输出尺寸等规格;
  • PHImageRequestOptions: 如上面所说,控制加载图片时的一系列参数

PHCollectionList,表示一组PHCollection,它本身也是一个PHCollection,因此PHCollection 作为一个集合,可以包含其他集合,这使到 PhotoKit 的组成比 ALAssetLibrary 要复杂一些。另外与 ALAssetLibrary 相似,一个 PHAsset 可以同时属于多个不同的 PHAssetCollection。

在iOS8.0之前苹果是使用 AssetsLibrary这套框架来获取和操作系统的相册的,然而在iOS8.0之后,苹果大力推出了 PhotoKit 。PhotoKit 是一套比 AssetsLibrary更完整也更高效的库,对资源的处理跟 AssetsLibrary也有很大的不同,可以观察照片库的变化,并创建和提交自己的修改。

PHAssetCollection 获取中获取到的可以是相册也可以是资源,但无论是哪种内容,都统一使用PHFetchResult 对象封装起来。

 

// 列出所有相册智能相册
PHFetchResult *smartAlbums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeSmartAlbum subtype:PHAssetCollectionSubtypeAlbumRegular options:nil];
  
// 列出所有用户创建的相册
PHFetchResult *topLevelUserCollections = [PHCollectionList fetchTopLevelUserCollectionsWithOptions:nil];
  
// 获取所有资源的集合,并按资源的创建时间排序
PHFetchOptions *options = [[PHFetchOptions alloc] init];
options.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:YES]];
PHFetchResult *assetsFetchResults = [PHAsset fetchAssetsWithOptions:options];
使用:
 UIImageView *imagV=[[UIImageView alloc]initWithFrame:CGRectMake(0, 100, 300,500)];
    [self.view addSubview:imagV];
    
    __weak typeof (self)weakSelf=self;
   NSArray *arr1= [self getAllSourcer];//1.直接获取所有的PHAsset集合
     NSArray *arr2= [self GetALLphotosUsingPohotKit];//2.先获取所有相册,然后从相机胶卷中获取PHAsset集合
    PHAsset *asset=(PHAsset *)arr1[45];
   //根据PHAsset获取原图
        [weakSelf accessToImageAccordingToTheAsset:asset size:PHImageManagerMaximumSize resizeMode:PHImageRequestOptionsResizeModeNone completion:^(UIImage *image, NSDictionary *info) {
            imagV.image=image;
        }];


*********

/**
 获取相册中的所有资源
 */

#import <UIKit/UIKit.h>
#import<Photos/Photos.h>
NS_ASSUME_NONNULL_BEGIN

@interface UIResponder (GetSystemAlbumLibrarySource)
// 1. Fetch every PHAsset in the photo library directly.
-(NSMutableArray *)getAllSourcer;
// 2. Fetch all smart albums first, then collect the PHAssets from the Camera Roll.
-(NSMutableArray*)GetALLphotosUsingPohotKit;
// Request the full-size image for a PHAsset; completion runs on the main queue.
- (void)accessToImageAccordingToTheAsset:(PHAsset *)asset size:(CGSize)size resizeMode:(PHImageRequestOptionsResizeMode)resizeMode completion:(void(^)(UIImage *image,NSDictionary *info))completion;
@end

NS_ASSUME_NONNULL_END


*******
/**
 获取相册中的所有资源
 */

#import "UIResponder+GetSystemAlbumLibrarySource.h"

@implementation UIResponder (GetSystemAlbumLibrarySource)
// 1. Fetch every PHAsset in the photo library, sorted by creation date.
-(NSMutableArray *)getAllSourcer{
    NSMutableArray *arr = [NSMutableArray array];
    PHFetchOptions *options = [[PHFetchOptions alloc] init];
    options.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:YES]];
    PHFetchResult *assetsFetchResults = [PHAsset fetchAssetsWithOptions:options];
    // Walk the fetch result and collect every asset into a plain array.
    [assetsFetchResults enumerateObjectsUsingBlock:^(PHAsset *asset, NSUInteger idx, BOOL * _Nonnull stop) {
        [arr addObject:asset];
    }];
    return arr;
}

// 2. Fetch all smart albums first, then collect the PHAssets from the Camera
// Roll (itself one of the smart albums; it contains all photos/videos/GIFs).
-(NSMutableArray*)GetALLphotosUsingPohotKit
{
    NSMutableArray *arr = [NSMutableArray array];
    // All system-created ("smart") album collections.
    PHFetchResult *smartAlbums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeSmartAlbum subtype:PHAssetCollectionSubtypeAlbumRegular options:nil];
    
    // Visit each album in turn.
    for (NSInteger i = 0; i < smartAlbums.count; i++) {
        // NOTE(review): this option object is built but never passed to any
        // fetch below, so it has no effect (dead code); the real sort/filter
        // happens in getAllPhotosAssetInAblumCollection:ascending:.
        PHFetchOptions *option = [[PHFetchOptions alloc] init];
        option.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:YES]];// sort by creation date
        option.predicate = [NSPredicate predicateWithFormat:@"mediaType == %ld", PHAssetMediaTypeImage];// photos only
        PHCollection *collection = smartAlbums[i];// one album
        /**
         Known English smart-album titles:
         Portrait、Long Exposure、Panoramas、Hidden、Recently Deleted、Live Photos、Videos、Animated、Recently Added、Slo-mo、Time-lapse、Bursts、Camera Roll、Screenshots、Favorites、Selfies
         */
        NSLog(@"相册标题---%@",collection.localizedTitle);
        // Only the Camera Roll is harvested. Its title is localized, so both
        // the Chinese and English names are checked (other locales will miss).
        if ([collection isKindOfClass:[PHAssetCollection class]]) {
            if ([collection.localizedTitle isEqualToString:@"相机胶卷"]||[collection.localizedTitle isEqualToString:@"Camera Roll"]) {
                PHAssetCollection *assetCollection = (PHAssetCollection *)collection;
                // Assets inside a collection always come wrapped in a PHFetchResult.
                // Fetch once just to see whether the album is non-empty.
                PHFetchResult *fetchResult = [PHAsset fetchAssetsInAssetCollection:assetCollection options:nil];
                
                NSArray *assets;
                if (fetchResult.count > 0) {
                    // Collect every image PHAsset in this album (a PHAsset is a
                    // handle; the actual image must be requested separately).
                    assets = [self getAllPhotosAssetInAblumCollection:assetCollection ascending:YES ];
                    [arr addObjectsFromArray:assets];
                }
            }
        }
    }
    // All photos found in the Camera Roll.
    return arr;
}

// Returns the PHAsset handles of all images inside one album collection,
// sorted by creation date in the requested direction.
- (NSArray *)getAllPhotosAssetInAblumCollection:(PHAssetCollection *)assetCollection ascending:(BOOL)ascending
{
    // Accumulates every image asset.
    NSMutableArray *assets = [NSMutableArray array];
    
    // Sort by creation date and keep images only (videos/audio excluded).
    PHFetchOptions *option = [[PHFetchOptions alloc] init];
    option.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:ascending]];
    option.predicate = [NSPredicate predicateWithFormat:@"mediaType == %ld", PHAssetMediaTypeImage];
    
    // Fetch, then copy each asset out of the fetch result.
    PHFetchResult *result = [PHAsset fetchAssetsInAssetCollection:assetCollection options:option];
    [result enumerateObjectsUsingBlock:^(PHAsset *asset, NSUInteger idx, BOOL * _Nonnull stop) {
        [assets addObject:asset];
    }];
    
    return assets;
}

// Requests the image for a PHAsset at the given size/resize mode; completion
// is invoked on the main queue (delivery is opportunistic, so a degraded
// image may be delivered before the final one).
- (void)accessToImageAccordingToTheAsset:(PHAsset *)asset size:(CGSize)size resizeMode:(PHImageRequestOptionsResizeMode)resizeMode completion:(void(^)(UIImage *image,NSDictionary *info))completion
{
    // NOTE(review): a single static request ID is shared by every call, so a
    // new request can cancel the previous in-flight one — confirm this is
    // intended when loading many images concurrently.
    static PHImageRequestID requestID = -2;
    
    CGFloat scale = [UIScreen mainScreen].scale;
    CGFloat width = MIN([UIScreen mainScreen].bounds.size.width, 500);
    // Cancel the previous request only for same-scale full-width requests.
    if (requestID >= 1 && size.width / width == scale) {
        [[PHCachingImageManager defaultManager] cancelImageRequest:requestID];
    }
    PHImageRequestOptions *option = [[PHImageRequestOptions alloc] init];
    option.deliveryMode = PHImageRequestOptionsDeliveryModeOpportunistic;
    //    option.resizeMode = PHImageRequestOptionsResizeModeFast;
    option.resizeMode = resizeMode;
    
    requestID = [[PHCachingImageManager defaultManager] requestImageForAsset:asset targetSize:size contentMode:PHImageContentModeAspectFill options:option resultHandler:^(UIImage * _Nullable result, NSDictionary * _Nullable info) {
        dispatch_async(dispatch_get_main_queue(), ^{
            completion(result,info);
            
        });
    }];
    
}

@end

*******相册中获取视频

调用:

NSMutableArray *videosarr=[self getallvideos];
    if(videosarr.count>0){
        PHAsset *videoasset=videosarr[1];
        [self getcanusevideoWith:videoasset completion:^(AVAsset * _Nullable asset, AVAudioMix * _Nullable audioMix, NSDictionary * _Nullable info) {
            // 上传视频时用到data
            AVURLAsset *urlAsset = (AVURLAsset *)asset;
            NSLog(@"----%@",urlAsset.URL);
            AVPlayerViewController *playerVc = [[AVPlayerViewController alloc]init];
    playerVc.player = [[AVPlayer alloc]initWithURL:urlAsset.URL];
  [self presentViewController:playerVc animated:YES completion:nil];
          
        }];
    }





#import <UIKit/UIKit.h>
#import<Photos/Photos.h>
NS_ASSUME_NONNULL_BEGIN

@interface UIResponder (GetSystemAlbumLibrarySource)

// Collect all video PHAssets from the system "Videos" smart album.
-(NSMutableArray *)getallvideos;
// Resolve a video PHAsset into a usable AVAsset (for playback or upload).
-(void)getcanusevideoWith:( PHAsset *)videoAsset completion:(void(^)(AVAsset * _Nullable asset, AVAudioMix * _Nullable audioMix, NSDictionary * _Nullable info))completion;
@end

NS_ASSUME_NONNULL_END

********

// 1. Collects every video PHAsset from the system "Videos" smart album.
// Returns a mutable array of PHAsset whose mediaType is video.
-(NSMutableArray *)getallvideos{
    NSMutableArray *allAlbumArray=[[NSMutableArray alloc]init];
    // Fetch only the "Videos" smart album.
    PHFetchResult<PHAssetCollection *> *assetCollections = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeSmartAlbum subtype:PHAssetCollectionSubtypeSmartAlbumVideos options:nil];
    for (PHAssetCollection *assetCollection in assetCollections) {
        NSLog(@"相册名称--……%@",assetCollection.localizedTitle);
        // BUGFIX: the original compared localizedTitle against the English
        // string @"Videos", so on non-English devices (e.g. Chinese locale,
        // where the album is titled "视频") nothing was ever collected.
        // Match the locale-independent album subtype instead.
        if (assetCollection.assetCollectionSubtype == PHAssetCollectionSubtypeSmartAlbumVideos) {
            // Every PHAsset in this album, wrapped in a PHFetchResult.
            PHFetchResult<PHAsset *> *albumAssets = [PHAsset fetchAssetsInAssetCollection:assetCollection options:nil];
            allAlbumArray = [self enumerateWithalbumAssets:albumAssets arr:allAlbumArray];
        }
    }
    return allAlbumArray;
}

// Appends every video asset found in albumAssets to allAlbumArray and
// returns that same array.
-(NSMutableArray *)enumerateWithalbumAssets:(PHFetchResult<PHAsset *> *)albumAssets arr:(NSMutableArray *)allAlbumArray{
    // PHAssetMediaType values: 0 unknown, 1 image, 2 video, 3 audio.
    for (PHAsset *candidate in albumAssets) {
        // Keep only the video files.
        if (candidate.mediaType == PHAssetMediaTypeVideo) {
            [allAlbumArray addObject:candidate];
        }
    }
    return allAlbumArray;
}
// Resolves a video PHAsset into a playable/uploadable AVAsset via
// PHImageManager and forwards the result to the completion block.
-(void)getcanusevideoWith:( PHAsset *)videoAsset completion:(void(^)(AVAsset * _Nullable asset, AVAudioMix * _Nullable audioMix, NSDictionary * _Nullable info))completion{
    PHVideoRequestOptions *requestOptions = [[PHVideoRequestOptions alloc] init];
    requestOptions.version = PHImageRequestOptionsVersionCurrent;
    requestOptions.deliveryMode = PHVideoRequestOptionsDeliveryModeAutomatic;

    [[PHImageManager defaultManager] requestAVAssetForVideo:videoAsset
                                                    options:requestOptions
                                              resultHandler:^(AVAsset * _Nullable asset, AVAudioMix * _Nullable audioMix, NSDictionary * _Nullable info) {
        completion(asset, audioMix, info);
    }];
}

 

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值