// Read the code below, friend — the full implementation follows.
#import <UIKit/UIKit.h>
/// A view that runs the device camera and vends every captured video frame
/// as a UIImage, while also showing a live preview layer.
/// Call -start to begin capture and -stop to end it.
@interface EveryFrameCameraImageView : UIView
/// Invoked once per captured frame with the frame converted to a UIImage.
/// NOTE(review): the implementation calls this from the capture queue, not the
/// main queue — callers must dispatch to main before touching UIKit. TODO confirm.
@property (nonatomic, copy) void(^getEveryFrameImage)(UIImage *image);
/// Attaches the preview layer to this view and starts the capture session.
- (void)start;
/// Stops the capture session and removes the preview layer.
- (void)stop;
@end
#import "EveryFrameCameraImageView.h"
#import <AVFoundation/AVFoundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>
/// Private capture-pipeline state and delegate conformance.
@interface EveryFrameCameraImageView()<
AVCaptureVideoDataOutputSampleBufferDelegate
>
// The capture session driving the camera pipeline (lazily built in the getter).
@property (nonatomic, strong) AVCaptureSession *session;
// FIXME: name is a typo ("cutome" -> "custom") and this property appears unused
// anywhere in this file — verify against other translation units before removing.
@property (nonatomic, strong) CALayer *cutomeLayer;
// Live camera preview layer; added in -start and removed in -stop.
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
@end
@implementation EveryFrameCameraImageView
#pragma mark AVCaptureSession delegate
/// AVCaptureVideoDataOutputSampleBufferDelegate callback, invoked on the capture
/// queue for every video frame. Converts the 32BGRA pixel buffer into a UIImage
/// and hands it to the getEveryFrameImage block.
///
/// Fixes vs. original: the block was called unconditionally (crash when nil),
/// and neither the image buffer nor the bitmap context was NULL-checked.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // No listener — skip all the conversion work.
    if (!self.getEveryFrameImage) {
        return;
    }
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL) {
        return;
    }
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    // kCVPixelFormatType_32BGRA (configured in -getOutput) corresponds to
    // little-endian 32-bit with premultiplied first alpha.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress,
                                                 width, height, 8, bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(colorSpace);
    if (context == NULL) {
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        return;
    }
    CGImageRef cgImage = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    // CGBitmapContextCreateImage copies the pixels, so the pixel buffer can be
    // unlocked before the image is consumed.
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    if (cgImage == NULL) {
        return;
    }
    // UIImageOrientationRight rotates the sensor's landscape frame for portrait UI.
    UIImage *image = [UIImage imageWithCGImage:cgImage scale:1.0
                                   orientation:UIImageOrientationRight];
    CGImageRelease(cgImage);
    // NOTE(review): still delivered on the capture queue, as before — callers
    // must hop to the main queue for UIKit work.
    self.getEveryFrameImage(image);
}
#pragma mark - publicMethod
/// Designated UIView initializer; paints the backdrop black so the view is not
/// blank-white before the first camera frame arrives.
- (instancetype)initWithFrame:(CGRect)frame
{
    if ((self = [super initWithFrame:frame]) != nil) {
        self.backgroundColor = [UIColor blackColor];
    }
    return self;
}
/// Attaches the live preview layer to this view and spins up the capture session.
- (void)start {
    AVCaptureVideoPreviewLayer *preview = self.previewLayer;
    [self.layer addSublayer:preview];
    [self.session startRunning];
}
/// Halts the capture session and detaches the preview layer from this view.
- (void)stop {
    AVCaptureSession *activeSession = self.session;
    [activeSession stopRunning];
    [self.previewLayer removeFromSuperlayer];
}
#pragma mark - privateMethod
/// Builds the video data output that feeds frames to this view's delegate
/// callback. Frames are delivered as 32BGRA on a private serial queue, and
/// late frames are dropped so the pipeline never backs up.
///
/// Fixes vs. original: explicit DISPATCH_QUEUE_SERIAL instead of NULL, and
/// modern dictionary/number literals for the video settings.
- (AVCaptureVideoDataOutput *)getOutput{
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    // Drop frames we cannot process in time rather than queueing them up.
    captureOutput.alwaysDiscardsLateVideoFrames = YES;
    // A serial queue keeps delegate callbacks in frame order.
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
    [captureOutput setSampleBufferDelegate:self queue:queue];
    // 32BGRA matches the CGBitmapContext configuration used when converting
    // each frame to a UIImage in the delegate callback.
    captureOutput.videoSettings = @{
        (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
    };
    return captureOutput;
}
#pragma mark - getter
/// Lazily builds the capture session wired camera input -> video data output.
///
/// Fixes vs. original: passed error:nil and never nil-checked the device input,
/// so a missing camera (simulator) or denied permission crashed in -addInput:;
/// also adds the canAddInput:/canAddOutput: guards Apple requires.
- (AVCaptureSession *)session{
    if (!_session) {
        AVCaptureSession *tempSession = [AVCaptureSession new];
        AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        NSError *inputError = nil;
        AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:camera
                                                                                   error:&inputError];
        if (captureInput && [tempSession canAddInput:captureInput]) {
            [tempSession addInput:captureInput];
        } else {
            // No camera, no permission, or the session rejected the input.
            NSLog(@"EveryFrameCameraImageView: could not add camera input: %@", inputError);
        }
        AVCaptureVideoDataOutput *output = [self getOutput];
        if ([tempSession canAddOutput:output]) {
            [tempSession addOutput:output];
        }
        _session = tempSession;
    }
    return _session;
}
/// Lazily creates the preview layer bound to the capture session, sized to
/// this view's bounds at creation time.
- (AVCaptureVideoPreviewLayer *)previewLayer{
    if (_previewLayer == nil) {
        _previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
        _previewLayer.frame = self.bounds;
    }
    return _previewLayer;
}
@end