Lightweight face recognition for iOS development

Our company project recently added face-recognition check-in, and I found surprisingly little material about face recognition online, so I'm sharing the code for my capture screen in the hope that it helps someone. It is only a lightweight implementation: it covers basic camera usage and photo capture, then creates a person (individual) and runs face verification; both the creation and the verification are done by calling Tencent Cloud's APIs.

First of all, you cannot use the system-provided camera UI, since it does not let you customize the interface; the capture screen below is therefore built manually on top of AVFoundation.

//
//  JHFaceUIViewController.m
//  CTBAN_B_iOS
//
//  Created by jzw on 2018/5/30.
//  Copyright © 2018 CTBAN. All rights reserved.
//

#import "JHFaceUIViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <QiniuSDK.h>
#import "JHCheckViewController.h"

#define clamp(a) (a>255?255:(a<0?0:a))
typedef NS_ENUM(NSInteger,VideoStatus){
    VideoStatusEnded = 0,
    VideoStatusStarted
};
@interface JHFaceUIViewController ()
<AVCaptureFileOutputRecordingDelegate,UIAlertViewDelegate>{
    AVCaptureSession *_captureSession;
    AVCaptureDevice *_videoDevice;
    AVCaptureDevice *_audioDevice;
    AVCaptureDeviceInput *_videoInput;
    AVCaptureDeviceInput *_audioInput;
    AVCaptureMovieFileOutput *_movieOutput;
    AVCaptureStillImageOutput *_stillImageOutput;
    AVCaptureVideoPreviewLayer *_captureVideoPreviewLayer;
}

@property (nonatomic,strong) NSLayoutConstraint *progressWidth;
@property (nonatomic,strong) UIView *progressView;
@property (nonatomic,strong) UILabel *cancelTip;
@property (nonatomic,strong) UILabel *tapBtn;
@property (nonatomic,strong) UIView *videoView;
@property (nonatomic,strong) UIButton *changeBtn;
@property (nonatomic,strong) UIButton *flashModelBtn;
@property (nonatomic,weak) UIView *focusCircle;
@property (nonatomic,assign) VideoStatus status;
@property (nonatomic,assign) BOOL canSave;
@property (nonatomic,strong) CADisplayLink *link;
@property (nonatomic,strong) UIButton *cancelButton;
@property (nonatomic,strong) UIButton *takeButton;
@property (nonatomic,strong) UIButton *turnButton;
@property (nonatomic,strong) KyoURLSessionTask *upHeadImageTask;
@property (nonatomic,strong) KyoURLSessionTask *modificationTask;
@property (nonatomic,strong) KyoURLSessionTask *newpersonTask;


@property (nonatomic,strong)NSString *resUrl;
@property (nonatomic,strong)NSString *key;
@property (nonatomic,strong)NSString *token;

@end

@implementation JHFaceUIViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    [self initUI];
    [self getAuthorization];
    // Do any additional setup after loading the view.
}

- (void)initUI
{
    self.videoView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, KScreenW, KScreenH)];
    [self.view addSubview:_videoView];
    
    self.cancelButton = [UIButton new];
    [self.view addSubview:_cancelButton];
    [_cancelButton setTitle:@"取消" forState:UIControlStateNormal];
    [_cancelButton setTitleColor:[UIColor whiteColor] forState:UIControlStateNormal];
    _cancelButton.titleLabel.font = Font(16);
    [_cancelButton mas_makeConstraints:^(MASConstraintMaker *make) {
        make.bottom.mas_equalTo(-40);
        make.left.mas_equalTo(30);
        make.height.mas_equalTo(20);
        make.width.mas_equalTo(40);
    }];
    [_cancelButton addTarget:self action:@selector(cancelButtonClick:) forControlEvents:UIControlEventTouchUpInside];
    
    self.takeButton = [UIButton new];
    [self.view addSubview:_takeButton];
    [_takeButton setImage:[UIImage imageNamed:@"facePhoto"] forState:UIControlStateNormal];
    [_takeButton setTitleColor:[UIColor whiteColor] forState:UIControlStateNormal];
    _takeButton.titleLabel.font = Font(16);
    [_takeButton mas_makeConstraints:^(MASConstraintMaker *make) {
        make.bottom.mas_equalTo(-30);
        make.centerX.equalTo(self.view.mas_centerX);
        make.height.mas_equalTo(80);
        make.width.mas_equalTo(80);
    }];
    _takeButton.clipsToBounds = YES;
    _takeButton.layer.cornerRadius = 40;
    [_takeButton addTarget:self action:@selector(takeButtonClick:) forControlEvents:UIControlEventTouchUpInside];
    
    self.turnButton = [UIButton new];
    [self.view addSubview:_turnButton];
    [_turnButton setImage:[UIImage imageNamed:@"faceChange"] forState:UIControlStateNormal];
    [_turnButton setTitleColor:[UIColor whiteColor] forState:UIControlStateNormal];
    _turnButton.titleLabel.font = Font(16);
    [_turnButton mas_makeConstraints:^(MASConstraintMaker *make) {
        make.bottom.mas_equalTo(-40);
        make.right.mas_equalTo(-30);
        make.height.mas_equalTo(40);
        make.width.mas_equalTo(40);
    }];
    [_turnButton addTarget:self action:@selector(turnButtonClick:) forControlEvents:UIControlEventTouchUpInside];
    
    self.flashModelBtn = [UIButton new];
    [self.videoView addSubview:_flashModelBtn];
    _flashModelBtn.frame = CGRectMake(30, 30, 150, 20);
    [_flashModelBtn setTitle:@"闪光灯" forState:UIControlStateNormal];
    [_flashModelBtn setTitleColor:[UIColor whiteColor] forState:UIControlStateNormal];

    _changeBtn = [UIButton new];
    [self.videoView addSubview:_changeBtn];
    _changeBtn.frame = CGRectMake(KScreenW - 180, 30, 150, 30);
    [_changeBtn setTitle:@"镜头切换" forState:UIControlStateNormal];
    [_changeBtn setTitleColor:[UIColor whiteColor] forState:UIControlStateNormal];

    self.cancelTip = [UILabel new];
    [self.videoView addSubview:_cancelTip];
    _cancelTip.text = @"滑动取消";
    _cancelTip.textColor = [UIColor whiteColor];
    [_cancelTip mas_makeConstraints:^(MASConstraintMaker *make) {
        make.bottom.mas_equalTo(-20);
        make.centerX.equalTo(_videoView.mas_centerX);
        make.width.mas_equalTo(80);
        make.height.mas_equalTo(20);
    }];
    
    self.tapBtn = [UILabel new];
    [self.view addSubview:_tapBtn];
    [_tapBtn mas_makeConstraints:^(MASConstraintMaker *make) {
        make.bottom.mas_equalTo(-50);
        make.centerX.equalTo(self.view.mas_centerX);
        make.height.width.mas_equalTo(100);
    }];
    [_tapBtn setText:@"按住"];
    _tapBtn.hidden = YES;

    [self.view bringSubviewToFront:self.cancelTip];
    [self.view bringSubviewToFront:self.progressView];
    [self.view bringSubviewToFront:self.changeBtn];
    [self.view bringSubviewToFront:self.flashModelBtn];
    self.videoView.layer.masksToBounds = YES;
    _tapBtn.layer.borderColor = [UIColor greenColor].CGColor;
//    [self addGenstureRecognizer];
}
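//Note: on iOS 10 and later the app's Info.plist must contain NSCameraUsageDescription
//(and NSMicrophoneUsageDescription, since an audio input is added below); without these
//keys the app crashes as soon as camera/microphone access is requested.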
//Request authorization
- (void)getAuthorization {
    /*
     AVAuthorizationStatusNotDetermined = 0,// the user has not yet made a choice

     AVAuthorizationStatusRestricted,    // not authorized and the user cannot change it, e.g. parental controls

     AVAuthorizationStatusDenied,       // the user denied the app access

     AVAuthorizationStatusAuthorized,    // authorized, ready to use
     */
    //Check the camera authorization status here
    switch ([AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo])
    {
        case AVAuthorizationStatusAuthorized:       //Authorized: the client is authorized to access the hardware supporting a media type.
        {
            NSLog(@"授权摄像头使用成功");
            
            [self setupAVCaptureInfo];
            break;
        }
        case AVAuthorizationStatusNotDetermined:    //Not determined: indicates that the user has not yet made a choice regarding whether the client can access the hardware.
        {
            //Request authorization again
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
                if(granted){    //The user granted access
                    dispatch_async(dispatch_get_main_queue(), ^{
                         //Once camera access is granted, set up the capture stack on the main queue
                        [self setupAVCaptureInfo]; //If this is not dispatched to the main queue the UI does not refresh right away
                    });
//                    [self setupAVCaptureInfo];
                    return;
                } else {        //The user denied authorization
                    [self pop];
//                    [self showMsgWithTitle:@"出错了" andContent:@"用户拒绝授权摄像头的使用权,返回上一页.请打开\n设置-->隐私/通用等权限设置"];
                    return;
                }
            }];
            break;
        }
        default:                                    //User denied access / not authorized
        {
//            [self pop];
            [self showMsgWithTitle:@"出错了" andContent:@"拒绝授权,返回上一页.请检查下\n设置-->隐私/通用等权限设置"];
            break;
        }
    }
}
- (void)setupAVCaptureInfo
{
    [self addSession];

    [_captureSession beginConfiguration];

    [self addVideo];
    [self addAudio];
    [self addPreviewLayer];

    [_captureSession commitConfiguration];

    //Start the session. Note: this is not the same as starting to record
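    //Note: -startRunning is a blocking call that can take some time; Apple recommends
    //calling it on a background queue so the main thread is not stalled.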
    [_captureSession startRunning];

}
- (void)addSession
{
    _captureSession = [[AVCaptureSession alloc] init];
    //Set the capture resolution
    /*  Common session presets include
     (
     AVCaptureSessionPresetLow,
     AVCaptureSessionPresetMedium,
     AVCaptureSessionPresetHigh,
     AVCaptureSessionPreset640x480,
     AVCaptureSessionPreset1280x720,
     AVCaptureSessionPreset1920x1080,
     AVCaptureSessionPresetPhoto
     )
     */
    //Note: the preset/resolution chosen here determines the size of the photos/videos captured later
    if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
        [_captureSession setSessionPreset:AVCaptureSessionPreset640x480];
    }
}
- (void)addVideo
{
    // Get the camera input device and create the AVCaptureDeviceInput object
    /* MediaType
     AVF_EXPORT NSString *const AVMediaTypeVideo                 NS_AVAILABLE(10_7, 4_0);       //video
     AVF_EXPORT NSString *const AVMediaTypeAudio                 NS_AVAILABLE(10_7, 4_0);       //audio
     AVF_EXPORT NSString *const AVMediaTypeText                  NS_AVAILABLE(10_7, 4_0);
     AVF_EXPORT NSString *const AVMediaTypeClosedCaption         NS_AVAILABLE(10_7, 4_0);
     AVF_EXPORT NSString *const AVMediaTypeSubtitle              NS_AVAILABLE(10_7, 4_0);
     AVF_EXPORT NSString *const AVMediaTypeTimecode              NS_AVAILABLE(10_7, 4_0);
     AVF_EXPORT NSString *const AVMediaTypeMetadata              NS_AVAILABLE(10_8, 6_0);
     AVF_EXPORT NSString *const AVMediaTypeMuxed                 NS_AVAILABLE(10_7, 4_0);
     */

    /* AVCaptureDevicePosition
     typedef NS_ENUM(NSInteger, AVCaptureDevicePosition) {
     AVCaptureDevicePositionUnspecified         = 0,
     AVCaptureDevicePositionBack                = 1,            //rear camera
     AVCaptureDevicePositionFront               = 2             //front camera
     } NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
     */
    _videoDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionFront];
    
    [self addVideoInput];
    [self addMovieOutput];
    [self addImageOutput];
}
- (void)addAudio
{
    NSError *audioError;
    // Add an audio input device
    _audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    //  Audio input object
    _audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:_audioDevice error:&audioError];
    if (audioError) {
        NSLog(@"取得录音设备时出错 ------ %@",audioError);
        return;
    }
    // Add the audio input object to the session (AVCaptureSession)
    if ([_captureSession canAddInput:_audioInput]) {
        [_captureSession addInput:_audioInput];
    }
}
- (void)addPreviewLayer
{

    [self.view layoutIfNeeded];

    // Create the preview layer from the session (AVCaptureSession)
    _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
//    _captureVideoPreviewLayer.frame = self.view.layer.bounds;
    _captureVideoPreviewLayer.frame = CGRectMake(0, 0, KScreenH,KScreenH);
    /* Fill mode (videoGravity)
     Options are AVLayerVideoGravityResize, AVLayerVideoGravityResizeAspect and AVLayerVideoGravityResizeAspectFill. AVLayerVideoGravityResizeAspect is default.
     */
    //If you need the preview to fill the whole screen, change this
    //    _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    // If the preview layer orientation does not match the video orientation, adjust this
    _captureVideoPreviewLayer.connection.videoOrientation = [_movieOutput connectionWithMediaType:AVMediaTypeVideo].videoOrientation;
    _captureVideoPreviewLayer.position = CGPointMake(self.view.width*0.5,self.videoView.height*0.5);

    // The layer on which the preview is displayed
    CALayer *layer = self.videoView.layer;
    layer.masksToBounds = true;
    [self.view layoutIfNeeded];
    [layer addSublayer:_captureVideoPreviewLayer];

}
#pragma mark Get the camera (front/back)

- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
    AVCaptureDevice *captureDevice = devices.firstObject;

    for ( AVCaptureDevice *device in devices ) {
        if ( device.position == position ) {
            captureDevice = device;
            break;
        }
    }

    return captureDevice;
}
- (void)addMovieOutput
{
    // Movie (video) file output object
    // Initialize the output object, used to obtain the output data
    _movieOutput = [[AVCaptureMovieFileOutput alloc] init];

    if ([_captureSession canAddOutput:_movieOutput]) {
        [_captureSession addOutput:_movieOutput];
        AVCaptureConnection *captureConnection = [_movieOutput connectionWithMediaType:AVMediaTypeVideo];

        //Set the video orientation
        /*
         typedef NS_ENUM(NSInteger, AVCaptureVideoOrientation) {
         AVCaptureVideoOrientationPortrait           = 1,
         AVCaptureVideoOrientationPortraitUpsideDown = 2,
         AVCaptureVideoOrientationLandscapeRight     = 3,
         AVCaptureVideoOrientationLandscapeLeft      = 4,
         } NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
         */
        //        if ([captureConnection isVideoOrientationSupported]) {
        //            [captureConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
        //        }

        // Video stabilization settings
        if ([captureConnection isVideoStabilizationSupported]) {
            captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
        }

        captureConnection.videoScaleAndCropFactor = captureConnection.videoMaxScaleAndCropFactor;
    }
}
- (void)addImageOutput {
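    //Note: AVCaptureStillImageOutput is deprecated as of iOS 10; AVCapturePhotoOutput is
    //the recommended replacement for new code.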
    
    _stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    if ([_captureSession canAddOutput:_stillImageOutput]) {
        [_captureSession addOutput:_stillImageOutput];
    }
    
}
- (void)addVideoInput
{
    NSError *videoError;

    // Video input object
    // Initialize the input object from the input device, used to obtain the input data
    _videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_videoDevice error:&videoError];
    if (videoError) {
        NSLog(@"---- 取得摄像头设备时出错 ------ %@",videoError);
        return;
    }
    
    // Add the video input object to the session (AVCaptureSession)
    if ([_captureSession canAddInput:_videoInput]) {
        [_captureSession addInput:_videoInput];
    }
    
}
-(void)addGenstureRecognizer{

    UITapGestureRecognizer *singleTapGesture=[[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(singleTap:)];
    singleTapGesture.numberOfTapsRequired = 1;
    singleTapGesture.delaysTouchesBegan = YES;
    
    UITapGestureRecognizer *doubleTapGesture=[[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(doubleTap:)];
    doubleTapGesture.numberOfTapsRequired = 2;
    doubleTapGesture.delaysTouchesBegan = YES;

    [singleTapGesture requireGestureRecognizerToFail:doubleTapGesture];
    [self.videoView addGestureRecognizer:singleTapGesture];
    [self.videoView addGestureRecognizer:doubleTapGesture];
}
//Switch between front and back cameras
- (void)turnButtonClick:(UIButton *)sender {
    
    switch (_videoDevice.position) {
        case AVCaptureDevicePositionBack:
            _videoDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionFront];
            break;
        case AVCaptureDevicePositionFront:
            _videoDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];
            break;
        default:
            return;
            break;
    }
    
    [self changeDevicePropertySafety:^(AVCaptureDevice *captureDevice) {
        NSError *error;
        AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_videoDevice error:&error];
        
        if (newVideoInput != nil) {
            //You must remove the current input before asking canAddInput:
            [_captureSession removeInput:_videoInput];
            if ([_captureSession canAddInput:newVideoInput]) {
                [_captureSession addInput:newVideoInput];
                _videoInput = newVideoInput;
            }else{
                [_captureSession addInput:_videoInput];
            }
            
        } else if (error) {
            NSLog(@"切换前/后摄像头失败, error = %@", error);
        }
    }];
    
    
}
//Always lock the device before changing its properties
-(void)changeDevicePropertySafety:(void (^)(AVCaptureDevice *captureDevice))propertyChange{
    //You could use _videoDevice directly, but the following is cleaner
    AVCaptureDevice *captureDevice= [_videoInput device];
    NSError *error;
    //Note: before changing device properties you must call lockForConfiguration:, and call unlockForConfiguration when you are done. The lock prevents the configuration from being modified from multiple places at the same time.
    BOOL lockAcquired = [captureDevice lockForConfiguration:&error];
    if (!lockAcquired) {
        NSLog(@"锁定设备过程error,错误信息:%@",error.localizedDescription);
    }else{
        [_captureSession beginConfiguration];
        propertyChange(captureDevice);
        [captureDevice unlockForConfiguration];
        [_captureSession commitConfiguration];
    }
}

//AVCaptureVideoDataOutputSampleBufferDelegate callback. It is only invoked if an AVCaptureVideoDataOutput is added to the session and its sampleBufferDelegate is set; it is unused in the photo flow above.
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(nonnull CMSampleBufferRef)sampleBuffer fromConnection:(nonnull AVCaptureConnection *)connection {
    
    __unused UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    
}
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
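    //This manual conversion assumes a bi-planar 4:2:0 YCbCr pixel buffer
    //(e.g. kCVPixelFormatType_420YpCbCrBiPlanarVideoRange/FullRange): plane 0 holds luma (Y),
    //plane 1 holds interleaved CbCr. The coefficients below are approximate BT.601 values.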
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer,0);
    
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    uint8_t *yBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    size_t yPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
    uint8_t *cbCrBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
    size_t cbCrPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1);
    
    int bytesPerPixel = 4;
    uint8_t *rgbBuffer = malloc(width * height * bytesPerPixel);
    
    for(int y = 0; y < height; y++) {
        uint8_t *rgbBufferLine = &rgbBuffer[y * width * bytesPerPixel];
        uint8_t *yBufferLine = &yBuffer[y * yPitch];
        uint8_t *cbCrBufferLine = &cbCrBuffer[(y >> 1) * cbCrPitch];
        
        for(int x = 0; x < width; x++) {
            int16_t y = yBufferLine[x];
            int16_t cb = cbCrBufferLine[x & ~1] - 128;
            int16_t cr = cbCrBufferLine[x | 1] - 128;
            
            uint8_t *rgbOutput = &rgbBufferLine[x*bytesPerPixel];
            
            int16_t r = (int16_t)roundf( y + cr *  1.4 );
            int16_t g = (int16_t)roundf( y + cb * -0.343 + cr * -0.711 );
            int16_t b = (int16_t)roundf( y + cb *  1.765);
            
            rgbOutput[0] = 0xff;
            rgbOutput[1] = clamp(b);
            rgbOutput[2] = clamp(g);
            rgbOutput[3] = clamp(r);
        }
    }
    
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(rgbBuffer, width, height, 8, width * bytesPerPixel, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipLast);
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    UIImage *image = [UIImage imageWithCGImage:quartzImage];
    
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    CGImageRelease(quartzImage);
    free(rgbBuffer);
    
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    
    return image;
}
//Take a photo, upload it to Qiniu, and submit
- (void)takeButtonClick:(UIButton *)sender {
    
    AVCaptureConnection * videoConnection = [_stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    
    for (AVCaptureConnection *connection in _stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                
                videoConnection = connection;
                break;
            }
        }
    }
    
    if (!videoConnection) {
        NSLog(@"take photo failed!");
        return;
    }
    
    self.takeButton.userInteractionEnabled = NO;
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(3 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
        self.takeButton.userInteractionEnabled = YES;
    });
    
    [_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (imageDataSampleBuffer == NULL) {
            self.takeButton.userInteractionEnabled = YES;
            return;
        }
        NSData * imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
        UIImage *image = [UIImage imageWithData:imageData];
        
        if ([UserInfo sharedUserInfo].userFace.length > 0) {
            [self checkFaceRightWithImage:imageData];
        }else{
             [self networkSendModifyAvatar:image];
        }
       
//        [self checkFaceRightWithImage:imageData];
        
    }];

}

- (void)checkFaceRightWithImage:(NSData *)imageData {
    AFHTTPSessionManager *sessionManager = [[AFHTTPSessionManager alloc] init];
    sessionManager.responseSerializer.acceptableContentTypes = [NSSet setWithArray:@[@"application/json",@"text/html",@"image/jpeg",@"image/png",@"application/octet-stream",@"text/json",@"text/plain",@"multipart/form-data"]];
    sessionManager.requestSerializer = [AFHTTPRequestSerializer serializer];
    NSDictionary *tempDict = [NSDictionary dictionaryWithObjectsAndKeys:[UserInfo sharedUserInfo].uuidCode,@"personId", nil];
    
//    [sessionManager POST:checkFaceVerify parameters:tempDict progress:nil success:^(NSURLSessionDataTask * _Nonnull task, id  _Nullable responseObject) {
//
//    } failure:^(NSURLSessionDataTask * _Nullable task, NSError * _Nonnull error) {
//
//    }];
    
    [sessionManager POST:checkFaceVerify parameters:tempDict constructingBodyWithBlock:^(id<AFMultipartFormData>  _Nonnull formData) {
        NSDateFormatter *formatter = [[NSDateFormatter alloc]init];
        formatter.dateFormat =@"yyyyMMddHHmmss";
        NSString *str = [formatter stringFromDate:[NSDate date]];
        NSString *fileName = [NSString stringWithFormat:@"%@.jpg", str];
        [formData appendPartWithFileData:imageData name:@"image" fileName:fileName mimeType:@"image/jpeg"];
    } progress:nil success:^(NSURLSessionDataTask * _Nonnull task, id  _Nullable responseObject) {
        
        NSDictionary *tempDict = [NSDictionary dictionaryWithDictionary:responseObject];
      
        if ([[responseObject objectForKey:@"code"] integerValue] == 0) {
            if ([[responseObject[@"data"] objectForKey:@"ismatch"] boolValue] == YES) {
                if (self.delegate &&[self.delegate respondsToSelector:@selector(getCheckNetWorkWithDict:)]) {
                    [self.delegate getCheckNetWorkWithDict:tempDict];
                }
                [self dismissViewControllerAnimated:YES completion:nil];
            }else{
                [KyoUtil showMessageHUD:@"人脸检测失败,请重新拍照" withTimeInterval:1.2 inView:self.view];
                self.takeButton.userInteractionEnabled = YES;
            }
        }else{
            [KyoUtil showMessageHUD:@"人脸检测失败,请重新拍照" withTimeInterval:1.2 inView:self.view];
            self.takeButton.userInteractionEnabled = YES;
        }
        
    } failure:^(NSURLSessionDataTask * _Nullable task, NSError * _Nonnull error) {
        self.takeButton.userInteractionEnabled = YES;
    }];
//    [[NetworkHelp shareNetwork] postNetwork:tempDict serverAPIUrl:checkFaceVerify completionBlock:^(NSDictionary *dict, NetworkResultModel *resultModel) {
//
//    } errorBlock:^(NSError *error) {
//
//    } finishedBlock:^(NSError *error) {
//
//    }];
}
#pragma mark --------------------
#pragma mark - Upload photo
/**
 *  Network operation: edit the avatar / upload the photo
 */
- (void)networkSendModifyAvatar:(UIImage *)image{
    
    [KyoUtil clearOperation:self.upHeadImageTask];
    
    image = [UIImage imageCompressForSize:image targetSize:CGSizeMake(200, 200)];
    
    NSData *imagedata=UIImageJPEGRepresentation(image,0.5f);
    
    NSString *strImage = [imagedata base64EncodedStringWithOptions:NSDataBase64Encoding64CharacterLineLength];
    
    NSUInteger resSize =  [strImage lengthOfBytesUsingEncoding:NSUTF8StringEncoding];
    
    NSString *fileName =  [NSString stringWithFormat:@"headImgAddr%ld.jpg",[UserInfo sharedUserInfo].id];
    
    NSString *URLString = checkUpLoadGetToken;
   
    NSDictionary *params = @{@"resName":fileName,
                             @"resSize":@(resSize)
                             };
    
    self.upHeadImageTask = [[NetworkHelp shareNetwork] postNetwork:params serverAPIUrl:URLString  completionBlock:^(NSDictionary *dict, NetworkResultModel *resultModel) {
        
        if ([NetworkHelp checkDataFromNetwork:dict errorShowInView:self.view]) {
            
            self.resUrl = resultModel.Data[@"resUrl"];
            self.key = resultModel.Data[@"key"];
            self.token = resultModel.Data[@"token"];
            
            [KyoUtil hideLoadingHUD:0 withView:self.view];
            
            dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1.1f * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
                
                [self networkConnectQiniuData:imagedata key:self.key token:self.token];
                
            });
            
        }else{
            [KyoUtil hideLoadingHUD:0 withView:self.view];
         
        }
        self.takeButton.userInteractionEnabled = YES;
    } errorBlock:^(NSError *error) {
        self.takeButton.userInteractionEnabled = YES;
        [KyoUtil hideLoadingHUD:0 withView:self.view];
    } finishedBlock:^(NSError *error) {
        [KyoUtil hideLoadingHUD:0 withView:self.view];
    }];
    
}

- (void)networkConnectQiniuData:(id)data key:(NSString *)key token:(NSString *)token{
    
    QNUploadManager *upManager = [[QNUploadManager alloc] init];
    [upManager putData:data key:key token:token
              complete: ^(QNResponseInfo *info, NSString *key, NSDictionary *resp) {
        
                //After the upload, create the face-recognition person (individual)
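                  //Suggestion: check the QNResponseInfo parameter here and only continue
                  //if the upload actually succeeded.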
                  [self creatFaceNewPerson];
              } option:nil];
    
}
- (void)creatFaceNewPerson {
    
    [KyoUtil clearOperation:self.newpersonTask];
    NSString *objectId = [NSString stringWithFormat:@"%@",[[UserInfo sharedUserInfo].userLoginTypeDTO objectForKey:@"objectId"]];
//    NSDictionary *tempDict = [NSDictionary dictionaryWithObjectsAndKeys:self.resUrl,@"url",@[objectId],@"groupIds",@([UserInfo sharedUserInfo].id),@"personId", nil];
    NSDictionary *tempDict = [NSDictionary dictionaryWithObjectsAndKeys:self.resUrl,@"url",@[objectId],@"groupIds",[UserInfo sharedUserInfo].uuidCode,@"personId", nil];
    self.newpersonTask = [[NetworkHelp shareNetwork] postNetwork:tempDict serverAPIUrl:checkFaceNewperson completionBlock:^(NSDictionary *dict, NetworkResultModel *resultModel) {
        
        if ([dict[@"code"] integerValue] == 0) {
             [self modificationUserInfo];
            if (self.delegate &&[self.delegate respondsToSelector:@selector(getCheckNetWorkWithDict:)]) {
                [self.delegate getCheckNetWorkWithDict:tempDict];
            }
            [self dismissViewControllerAnimated:YES completion:nil];
        }else{
            
            [KyoUtil showMessageHUD:@"人脸检测失败" withTimeInterval:1.2 inView:self.view];
            self.takeButton.userInteractionEnabled = YES;
        }
        
//        if ([NetworkHelp checkDataFromNetwork:dict showAlertView:YES]) {
//            NSLog(@"添加成功");
//            //Update the user info
//
//
//        }
    } errorBlock:^(NSError *error) {
        self.takeButton.userInteractionEnabled = YES;
    } finishedBlock:^(NSError *error) {
        
    }];
    
}
- (void)modificationUserInfo {
    NSDictionary *dict = [NSDictionary dictionary];
    if ([[[UserInfo sharedUserInfo].userLoginTypeDTO objectForKey:@"loginType"] integerValue] == 3) {
        dict = @{@"userId":@([UserInfo sharedUserInfo].id),
                 @"userFace":self.resUrl,
                 @"loginType":[[UserInfo sharedUserInfo].userLoginTypeDTO objectForKey:@"loginType"]
                 };
    }else{
        dict = @{@"userId":@([UserInfo sharedUserInfo].id),
                 @"userFace":self.resUrl,
                 @"objectId":[[UserInfo sharedUserInfo].userLoginTypeDTO objectForKey:@"objectId"],
                 @"loginType":[[UserInfo sharedUserInfo].userLoginTypeDTO objectForKey:@"loginType"]
                 };
    }
    
    NSString *urls = checkModifyUserInfo;
    [KyoUtil  clearOperation:self.modificationTask];
    self.modificationTask = [[NetworkHelp shareNetwork] postNetwork:dict serverAPIUrl:urls completionBlock:^(NSDictionary *dict, NetworkResultModel *resultModel) {
        if ([NetworkHelp checkDataFromNetwork:dict showAlertView:YES]) {
            [UserInfo sharedUserInfo].userFace = self.resUrl;
        }
    } errorBlock:^(NSError *error) {
        
    } finishedBlock:^(NSError *error) {
    }];
}

- (void)cancelButtonClick:(UIButton *)sender {
    [self dismissViewControllerAnimated:YES completion:nil];
}
-(void)pop
{
    if (self.navigationController) {
        [self.navigationController popViewControllerAnimated:YES];
    }else{
        [self dismissViewControllerAnimated:YES completion:nil];
    }
}
- (void)showMsgWithTitle:(NSString *)title andContent:(NSString *)content
{
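    //Note: UIAlertView is deprecated since iOS 9; UIAlertController is the modern replacement.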
    UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:title message:content delegate:nil cancelButtonTitle:@"确定" otherButtonTitles:nil];
    [alertView show];
    alertView.delegate = self;
    alertView.tag = 1;
}

- (void)alertView:(UIAlertView *)alertView didDismissWithButtonIndex:(NSInteger)buttonIndex {
    
    if (alertView.tag == 1) {
        [self dismissViewControllerAnimated:YES completion:nil];
    }
    
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

/*
#pragma mark - Navigation

// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
    // Get the new view controller using [segue destinationViewController].
    // Pass the selected object to the new view controller.
}
*/

@end
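
For completeness, here is a rough sketch of how a caller might present this controller and receive the result. Only the getCheckNetWorkWithDict: callback and the delegate check appear in the code above; the protocol name JHFaceUIViewControllerDelegate, the caller class, and the delegate property type are my assumptions for illustration.

#import <UIKit/UIKit.h>
#import "JHFaceUIViewController.h"

//Hypothetical caller: the class name and protocol name are assumptions, not part of the original code.
@interface MyCheckInViewController : UIViewController <JHFaceUIViewControllerDelegate>
@end

@implementation MyCheckInViewController

- (void)startFaceCheckIn {
    JHFaceUIViewController *faceVC = [[JHFaceUIViewController alloc] init];
    faceVC.delegate = self;   //assumes the controller exposes a weak delegate property
    faceVC.modalPresentationStyle = UIModalPresentationFullScreen;
    [self presentViewController:faceVC animated:YES completion:nil];
}

//Delegate callback used by the controller above: it hands the server response
//(person creation / face verification result) back to the caller before dismissing.
- (void)getCheckNetWorkWithDict:(NSDictionary *)dict {
    NSLog(@"face check result: %@", dict);
}

@end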
