自定义相机,人脸追踪

输入设备和输出设备由 session 连接起来

// --- Capture-session setup (fragment: the enclosing method's signature and
// closing brace are outside this excerpt) ---

// Create the session that links the camera input to the outputs.
_captureSession = [AVCaptureSession new];

// Input device: default to the back camera.
_captureDevice = [self getDeviceOfBackCamera];
// NOTE(review): error:nil swallows failures (no camera / permission denied);
// consider passing an NSError out-parameter and handling it.
_captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:_captureDevice error:nil];
_captureSession.sessionPreset = AVCaptureSessionPresetHigh;
if ([_captureSession canAddInput:_captureDeviceInput]) {
    [_captureSession addInput:_captureDeviceInput];
}

// Still-image output, configured to deliver JPEG data.
_captureStillImageOutput = [AVCaptureStillImageOutput new];
NSDictionary *settingDic = @{AVVideoCodecKey : AVVideoCodecJPEG};
[_captureStillImageOutput setOutputSettings:settingDic];
if ([_captureSession canAddOutput:_captureStillImageOutput]) {
    [_captureSession addOutput:_captureStillImageOutput];
}

// Alternative for live video frames (kept from the original for reference):
//    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc]init];
//    [output setVideoSettings:@{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)}];
//    [output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
//    if([_captureSession canAddOutput:output]) {
//        [_captureSession addOutput:output];
//    }

// Metadata output for face detection; delegate callbacks on the main queue.
AVCaptureMetadataOutput *metadataOutput = [AVCaptureMetadataOutput new];
[metadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
if ([_captureSession canAddOutput:metadataOutput]) {
    [_captureSession addOutput:metadataOutput];
}
// metadataObjectTypes must be set AFTER the output has been added to the
// session; setting an unsupported type before that raises an exception.
metadataOutput.metadataObjectTypes = @[AVMetadataObjectTypeFace];

// Cache the still-image output's video connection and opt into automatic
// video stabilization when the connection supports it.
// (Ivar name `_caputureConnection` is a pre-existing typo, kept for
// compatibility with the rest of the class.)
for (AVCaptureConnection *connection in _captureStillImageOutput.connections) {
    for (AVCaptureInputPort *inputPort in connection.inputPorts) {
        if ([inputPort.mediaType isEqualToString:AVMediaTypeVideo]) {
            _caputureConnection = connection;
            break;
        }
    }
    if (_caputureConnection) {
        if ([_caputureConnection isVideoStabilizationSupported]) {
            _caputureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
        }
        break;
    }
}

// BUGFIX: this call previously sat INSIDE the loop above, after the `break`,
// so it never ran when the first connection already carried video. The
// preview layer must be created unconditionally once setup is complete.
[self previewLayerFrame:[UIScreen mainScreen].bounds];

// Lazily creates the preview layer showing the camera feed for the given
// frame, then starts the capture session. Subsequent calls are no-ops once
// the layer exists.
- (void)previewLayerFrame:(CGRect)rect {
    if (_captureVideoPreviewLayer) {
        return;
    }
    // Build the preview layer backed by the capture session.
    AVCaptureVideoPreviewLayer *previewLayer =
        [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    previewLayer.frame = rect;
    previewLayer.masksToBounds = NO;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    _captureVideoPreviewLayer = previewLayer;

    [_captureSession startRunning];
}

// Switches the session input to the front or back camera.
// Session reconfiguration must be bracketed by beginConfiguration /
// commitConfiguration so the change is applied atomically.
- (void)setAVCapturePosition:(AVCaptureDevicePosition)captureDevicePosition {
    // Resolve the target device first; bail out before touching the session
    // when the position is unspecified/unknown.
    AVCaptureDevice *targetDevice = nil;
    switch (captureDevicePosition) {
        case AVCaptureDevicePositionBack:
            targetDevice = [self getDeviceOfBackCamera];
            break;
        case AVCaptureDevicePositionFront:
            targetDevice = [self getDeviceOfFrontCamera];
            break;
        case AVCaptureDevicePositionUnspecified:
        default:
            NSLog(@"设备没有摄像头");
            return;
    }

    [self.captureSession beginConfiguration];

    // Remove the current input before trying to add the new one — a session
    // cannot hold two camera inputs at once.
    AVCaptureDeviceInput *previousInput = self.captureDeviceInput;
    if (previousInput) {
        [self.captureSession removeInput:previousInput];
    }

    AVCaptureDeviceInput *newInput =
        [[AVCaptureDeviceInput alloc] initWithDevice:targetDevice error:nil];
    if (newInput && [self.captureSession canAddInput:newInput]) {
        [self.captureSession addInput:newInput];
        self.captureDeviceInput = newInput;
    } else if (previousInput && [self.captureSession canAddInput:previousInput]) {
        // BUGFIX: the original removed the old input and, when the new input
        // could not be added, left the session with NO input at all.
        // Restore the previous input so capture keeps working.
        [self.captureSession addInput:previousInput];
    }

    [self.captureSession commitConfiguration];
}

// Captures a still JPEG frame asynchronously and forwards the image data to
// the delegate. The session is paused while the data is handed off, then
// resumed so the preview keeps running.
- (void)takePhoto {
    AVCaptureConnection *videoConnection =
        [_captureStillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    if (!videoConnection) {
        NSLog(@"take photo failed!");
        return;
    }

    // Set the flag BEFORE kicking off the async capture (the original set it
    // after scheduling, so readers could observe it unset during capture).
    _isTakePhoto = YES;

    __weak typeof(self) weakSelf = self;
    [_captureStillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection
                                                          completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (!strongSelf) {
            return;
        }
        // BUGFIX: guard the error path — jpegStillImageNSDataRepresentation:
        // crashes when passed a NULL sample buffer.
        if (error || imageDataSampleBuffer == NULL) {
            NSLog(@"take photo failed! error: %@", error);
            return;
        }
        [strongSelf->_captureSession stopRunning];
        NSData *dataJPEG =
            [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
        [strongSelf->_delegate lxCamara:strongSelf didClickTakePhotoWithImageData:dataJPEG];
        [strongSelf->_captureSession startRunning];
    }];
}


// Returns the first back-facing video capture device, or nil when the
// hardware has no back camera.
- (AVCaptureDevice *)getDeviceOfBackCamera {
    for (AVCaptureDevice *camera in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (camera.position == AVCaptureDevicePositionBack) {
            return camera;
        }
    }
    return nil;
}

// Returns the first front-facing video capture device, or nil when the
// hardware has no front camera.
- (AVCaptureDevice *)getDeviceOfFrontCamera {
    for (AVCaptureDevice *camera in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (camera.position == AVCaptureDevicePositionFront) {
            return camera;
        }
    }
    return nil;
}

// Returns YES if a camera exists at the given position (front/back).
// (Method name typo `Camara` kept — it is public API of this class.)
- (BOOL)isHaveCamaraType:(AVCaptureDevicePosition)position {
    NSArray<AVCaptureDevice *> *devicesArray =
        [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devicesArray) {
        if (device.position == position) {
            return YES;
        }
    }
    // Idiom fix: return the Objective-C BOOL constant NO, not C99 `false`,
    // matching the `return YES;` above.
    return NO;
}

//如果选择视频输出则用这个代理

//- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {

//    if (_isTakePhoto) {

//        //    [_captureSession stopRunning];

//        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

//        

//        

//        // Lock the base address of the pixel buffer

//        CVPixelBufferLockBaseAddress(imageBuffer, 0);

//        

//        // Get the number of bytes per row for the pixel buffer

//        void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

//        

//        // Get the number of bytes per row for the pixel buffer

//        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

//        // Get the pixel buffer width and height

//        size_t width = CVPixelBufferGetWidth(imageBuffer);

//        size_t height = CVPixelBufferGetHeight(imageBuffer);

//        

//        // Create a device-dependent RGB color space

//        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

//        

//        

//        // Create a bitmap graphics context with the sample buffer data

//        CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

//        

//        // Create a Quartz image from the pixel data in the bitmap graphics context

//        CGImageRef quartzImage = CGBitmapContextCreateImage(context);

//        // Unlock the pixel buffer

//        CVPixelBufferUnlockBaseAddress(imageBuffer,0);

//        

//        // Free up the context and color space

//        CGContextRelease(context);

//        CGColorSpaceRelease(colorSpace);

//        

//        // Create an image object from the Quartz image

//        UIImage *image = [UIImage imageWithCGImage:quartzImage];

//        

//        // Release the Quartz image

//        CGImageRelease(quartzImage);

//        

//        CGSize imgSize = CGSizeMake(image.size.width*image.scale, image.size.height*image.scale);

//        UIGraphicsBeginImageContextWithOptions(CGSizeMake(imgSize.height, imgSize.width), NO, image.scale);

//        CGContextRef context1 = UIGraphicsGetCurrentContext();

//        CGContextTranslateCTM(context1, imgSize.height/2, imgSize.width/2);

//        CGContextRotateCTM(context1, M_PI/2);

//        CGContextScaleCTM(context1, 1.0, -1.0);

//        CGContextTranslateCTM(context1, -imgSize.width/2, -imgSize.height/2);

//        CGContextDrawImage(context1, CGRectMake(0, 0, imgSize.width, imgSize.height), image.CGImage);

//        UIImage *outputImage = UIGraphicsGetImageFromCurrentImageContext();

//        UIGraphicsEndImageContext();

//        

//        UIImageWriteToSavedPhotosAlbum(outputImage,nil,nil,nil);

//        [_captureSession stopRunning];

//        _isTakePhoto = NO;

//    }

//

//}

// Face tracking — AVCaptureMetadataOutputObjectsDelegate callback (delivered
// on the main queue, as configured in setup). Converts detected face bounds
// into preview-layer coordinates and forwards the first and last face rects
// to the face delegate. When no faces are detected both rects are CGRectZero.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    CGRect firstRect = CGRectZero;
    CGRect lastRect = CGRectZero;
    for (NSUInteger i = 0; i < metadataObjects.count; i++) {
        AVMetadataObject *metadataObject = metadataObjects[i];
        if (![metadataObject.type isEqual:AVMetadataObjectTypeFace]) {
            continue;
        }
        // BUGFIX: use transformedMetadataObjectForMetadataObject:, which maps
        // the metadata bounds into the preview layer's coordinate space while
        // accounting for video gravity, orientation, and mirroring.
        // The previous rectForMetadataOutputRectOfInterest: call does not
        // apply orientation/mirroring, yielding misplaced face rects.
        AVMetadataObject *transformedFace =
            [_captureVideoPreviewLayer transformedMetadataObjectForMetadataObject:metadataObject];
        CGRect faceRect = transformedFace ? transformedFace.bounds : CGRectZero;
        if (i == 0) {
            firstRect = faceRect;
        } else {
            lastRect = faceRect;
        }
    }
    [_faceDelegate firstRect:firstRect lastRect:lastRect];
}



  • 0
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值