Cropping a Specified Region out of a Captured Image

I've recently been working on something similar to a QR-code scanner, which likewise needs to grab the image inside a particular region of the camera frame. Let me go straight to the most important part of the code.

The code below initializes the AVFoundation capture pipeline so the camera image can be displayed on the view. First, a quick walkthrough of the problems I ran into along the way and how I solved them.

1. Cutting a "hole" in the layer, which is the actual crop region. I used a CAShapeLayer with the even-odd fill rule, so it can be applied as a mask to the coverLayer that sits on top of the previewLayer (a minimal sketch of this trick follows this list).

2. Getting the full frame image is easy: it arrives as the sampleBuffer in the delegate callback. I used AVCaptureVideoDataOutput, which delivers a continuous stream of sampled frames.

3. Extracting the crop region from the full image. I spent a lot of time on this and still could not get the cropped region right. I first tried CGImageCreateWithImageInRect, but the resulting image came out at the wrong position and size; switching to a CGContext-based approach did not fix it either. After much Googling and pondering, the cause became clear: the preview layer scales the video to fit, so the actual image dimensions are not the same as the screen's. Once I was sure that was the problem, the fix was to compute, for each videoGravity mode, where the crop region actually sits inside the image. That is what the calcRect method does: it maps the "hole" cut out on screen to the corresponding rect in the image (a worked numeric example follows that method in the listing below).
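To make point 1 concrete, here is a minimal standalone sketch of the even-odd mask trick. It is my own condensation of the full code below; the names view and cropRect are assumed to already exist:

<pre name="code" class="objc">// Dim everything except the crop region: a cover layer over the preview,
// masked by a shape whose even-odd fill leaves the crop rect transparent.
CALayer* cover = [CALayer layer];
cover.frame = view.bounds;
cover.backgroundColor = [[[UIColor blackColor] colorWithAlphaComponent:0.6] CGColor];

CAShapeLayer* mask = [CAShapeLayer layer];
CGMutablePathRef path = CGPathCreateMutable();
CGPathAddRect(path, NULL, cropRect);    // the "hole"
CGPathAddRect(path, NULL, view.bounds); // the full area
mask.path = path;
mask.fillRule = kCAFillRuleEvenOdd;     // the overlap stays unfilled
CGPathRelease(path);

cover.mask = mask; // only the filled (non-hole) area keeps the dim cover
[view.layer addSublayer:cover];
</pre>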



It finally works. Take a look if you're interested.


<pre name="code" class="objc">//
//  ScanView.m
//  xxoo
//
//  Created by Tommy on 13-11-6.
//  Copyright (c) 2013 Tommy. All rights reserved.
//

#import "ScanView.h"
#import <AVFoundation/AVFoundation.h>


static inline double radians (double degrees) {return degrees * M_PI/180;}

@interface ScanView()<AVCaptureVideoDataOutputSampleBufferDelegate>

@property AVCaptureVideoPreviewLayer* previewLayer;
@property AVCaptureSession* session;
@property AVCaptureDevice* videoDevice;
@property dispatch_queue_t camera_sample_queue;
@property CALayer* coverLayer;
@property CAShapeLayer* cropLayer;
@property CALayer* stillImageLayer;
@property AVCaptureStillImageOutput* stillImageOutput;

@property UIImageView* stillImageView;
@property UIImage* cropImage;

@property BOOL hasSetFocus;

@end

@implementation ScanView

- (id)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {
        // Initialization code
        self.hasSetFocus = NO;
        [self initAVCapture];
        [self initOtherLayers];
    }
    return self;
}

/*
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect
{
    // Drawing code
}
*/
-(void)layoutSubviews
{
    [super layoutSubviews];
    [self.previewLayer setFrame:self.bounds];
    [self.coverLayer setFrame:self.bounds];
    self.coverLayer.mask = self.cropLayer;
}

- (void) initAVCapture{

    self.cropRect = CGRectZero;

    self.videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput* input = [[AVCaptureDeviceInput alloc] initWithDevice:self.videoDevice error:nil];

    AVCaptureVideoDataOutput* output = [[AVCaptureVideoDataOutput alloc] init];
    output.alwaysDiscardsLateVideoFrames = YES;
    self.camera_sample_queue = dispatch_queue_create("com.scan.video.sample_queue", DISPATCH_QUEUE_SERIAL);
    [output setSampleBufferDelegate:self queue:self.camera_sample_queue];

    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
    NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [output setVideoSettings:videoSettings];

    self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary* outputSettings = @{AVVideoCodecKey:AVVideoCodecJPEG};
    [self.stillImageOutput setOutputSettings:outputSettings];

    self.session = [[AVCaptureSession alloc] init];
    self.session.sessionPreset = AVCaptureSessionPresetMedium;

    if ([self.session canAddInput:input])
    {
        [self.session addInput:input];

        if ([self.session canAddOutput:output])
        {
            [self.session addOutput:self.stillImageOutput];
            [self.session addOutput:output];

            self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
            self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;

            [self.layer addSublayer:self.previewLayer];

            return; // success
        }
    }

    self.session = nil;
}

- (void)setCropRect:(CGRect)cropRect
{
    _cropRect = cropRect;
    if(!CGRectEqualToRect(CGRectZero, self.cropRect)){

        self.cropLayer = [[CAShapeLayer alloc] init];
        CGMutablePathRef path = CGPathCreateMutable();

        // Add both the crop rect and the full bounds; with the even-odd
        // fill rule, their overlap is left unfilled, punching the "hole".
        CGPathAddRect(path, NULL, self.cropRect);
        CGPathAddRect(path, NULL, self.bounds);

        [self.cropLayer setFillRule:kCAFillRuleEvenOdd];
        [self.cropLayer setPath:path];
        [self.cropLayer setFillColor:[[UIColor whiteColor] CGColor]];
        CGPathRelease(path);

        [self.cropLayer setNeedsDisplay];

        //[self setVideoFocus];

    }

    [self.stillImageLayer setFrame:CGRectMake(100, 450, CGRectGetWidth(cropRect), CGRectGetHeight(cropRect))];
}

- (void) setVideoFocus{

    NSError *error;
    CGPoint focusPoint = CGPointMake(CGRectGetMidX(self.cropRect), CGRectGetMidY(self.cropRect));
    // Check hasSetFocus first so the device is only locked when we will
    // actually change the focus (and therefore always unlocked again).
    if(!self.hasSetFocus && [self.videoDevice isFocusPointOfInterestSupported]
       && [self.videoDevice lockForConfiguration:&error]){
        self.hasSetFocus = YES;
        [self.videoDevice setFocusPointOfInterest:[self convertToPointOfInterestFromViewCoordinates:focusPoint]];
        [self.videoDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
        [self.videoDevice unlockForConfiguration];
    }
//    [self.videoDevice setFocusMode:AVCaptureFocusModeAutoFocus];
    NSLog(@"error:%@",error);

}

- (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates
{
    CGPoint pointOfInterest = CGPointMake(.5f, .5f);
    CGSize frameSize = self.frame.size;

    AVCaptureVideoPreviewLayer *videoPreviewLayer = self.previewLayer;

    if ([self.previewLayer isMirrored]) {
        viewCoordinates.x = frameSize.width - viewCoordinates.x;
    }

    if ( [[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResize] ) {
        pointOfInterest = CGPointMake(viewCoordinates.y / frameSize.height, 1.f - (viewCoordinates.x / frameSize.width));
    } else {
        CGRect cleanAperture;
        for (AVCaptureInputPort *port in [[[[self session] inputs] lastObject] ports]) {
            if ([[port mediaType] isEqualToString:AVMediaTypeVideo]) {
                cleanAperture = CMVideoFormatDescriptionGetCleanAperture([port formatDescription], YES);
                CGSize apertureSize = cleanAperture.size;
                CGPoint point = viewCoordinates;

                CGFloat apertureRatio = apertureSize.height / apertureSize.width;
                CGFloat viewRatio = frameSize.width / frameSize.height;
                CGFloat xc = .5f;
                CGFloat yc = .5f;

                if ( [[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResizeAspect] ) {
                    if (viewRatio > apertureRatio) {
                        CGFloat y2 = frameSize.height;
                        CGFloat x2 = frameSize.height * apertureRatio;
                        CGFloat x1 = frameSize.width;
                        CGFloat blackBar = (x1 - x2) / 2;
                        if (point.x >= blackBar && point.x <= blackBar + x2) {
                            xc = point.y / y2;
                            yc = 1.f - ((point.x - blackBar) / x2);
                        }
                    } else {
                        CGFloat y2 = frameSize.width / apertureRatio;
                        CGFloat y1 = frameSize.height;
                        CGFloat x2 = frameSize.width;
                        CGFloat blackBar = (y1 - y2) / 2;
                        if (point.y >= blackBar && point.y <= blackBar + y2) {
                            xc = ((point.y - blackBar) / y2);
                            yc = 1.f - (point.x / x2);
                        }
                    }
                } else if ([[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResizeAspectFill]) {
                    if (viewRatio > apertureRatio) {
                        CGFloat y2 = apertureSize.width * (frameSize.width / apertureSize.height);
                        xc = (point.y + ((y2 - frameSize.height) / 2.f)) / y2;
                        yc = (frameSize.width - point.x) / frameSize.width;
                    } else {
                        CGFloat x2 = apertureSize.height * (frameSize.height / apertureSize.width);
                        yc = 1.f - ((point.x + ((x2 - frameSize.width) / 2)) / x2);
                        xc = point.y / frameSize.height;
                    }

                }

                pointOfInterest = CGPointMake(xc, yc);
                break;
            }
        }
    }

    return pointOfInterest;
}

- (void) initOtherLayers{
    self.coverLayer = [CALayer layer];

    self.coverLayer.backgroundColor = [[[UIColor blackColor] colorWithAlphaComponent:0.6] CGColor];
    [self.layer addSublayer:self.coverLayer];

    if(!CGRectEqualToRect(CGRectZero, self.cropRect)){

        self.cropLayer = [[CAShapeLayer alloc] init];
        CGMutablePathRef path = CGPathCreateMutable();

        CGPathAddRect(path, NULL, self.cropRect);
        CGPathAddRect(path, NULL, self.bounds);

        [self.cropLayer setFillRule:kCAFillRuleEvenOdd];
        [self.cropLayer setPath:path];
        [self.cropLayer setFillColor:[[UIColor redColor] CGColor]];
        CGPathRelease(path);
    }

    self.stillImageLayer = [CALayer layer];
    self.stillImageLayer.backgroundColor = [[UIColor yellowColor] CGColor];
    self.stillImageLayer.contentsGravity = kCAGravityResizeAspect;
    [self.coverLayer addSublayer:self.stillImageLayer];


    self.stillImageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 300, 100, 100)];
    self.stillImageView.backgroundColor = [UIColor redColor];
    self.stillImageView.contentMode = UIViewContentModeScaleAspectFit;
    [self addSubview:self.stillImageView];


    self.previewLayer.contentsGravity = kCAGravityResizeAspect;

}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{

    [self setVideoFocus];

    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    self.cropImage = [self cropImageInRect:image];

    dispatch_async(dispatch_get_main_queue(), ^{

        [self.stillImageView setImage:image];
        // [self.stillImageLayer setContents:(id)[self.cropImage CGImage]];
    });

}
// Create a UIImage from sample buffer data
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer
{
    // Get the CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the base address of the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

    // Get the number of bytes per row of the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the width and height of the pixel buffer
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    //NSLog(@"%zu,%zu",width,height);

    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Create a bitmap graphics context with the sample buffer data
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);

    // Release the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Create a UIImage from the Quartz image, rotated to portrait
    //UIImage *image = [UIImage imageWithCGImage:quartzImage];
    UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0f orientation:UIImageOrientationRight];

    // Release the Quartz image
    CGImageRelease(quartzImage);

    return (image);

}


- (CGRect) calcRect:(CGSize)imageSize{
    NSString* gravity = self.previewLayer.videoGravity;
    CGRect cropRect = self.cropRect;
    CGSize screenSize = self.previewLayer.bounds.size;

    CGFloat screenRatio = screenSize.height / screenSize.width;
    CGFloat imageRatio = imageSize.height / imageSize.width;

    // The rect the image actually occupies on screen, given the videoGravity.
    CGRect presentImageRect = self.previewLayer.bounds;
    CGFloat scale = 1.0;


    if([AVLayerVideoGravityResizeAspect isEqual:gravity]){

        CGFloat presentImageWidth = imageSize.width;
        CGFloat presentImageHeight = imageSize.height;
        if(screenRatio > imageRatio){
            presentImageWidth = screenSize.width;
            presentImageHeight = presentImageWidth * imageRatio;

        }else{
            presentImageHeight = screenSize.height;
            presentImageWidth = presentImageHeight / imageRatio;
        }

        presentImageRect.size = CGSizeMake(presentImageWidth, presentImageHeight);
        presentImageRect.origin = CGPointMake((screenSize.width-presentImageWidth)/2.0, (screenSize.height-presentImageHeight)/2.0);

    }else if([AVLayerVideoGravityResizeAspectFill isEqual:gravity]){

        CGFloat presentImageWidth = imageSize.width;
        CGFloat presentImageHeight = imageSize.height;
        if(screenRatio > imageRatio){
            presentImageHeight = screenSize.height;
            presentImageWidth = presentImageHeight / imageRatio;
        }else{
            presentImageWidth = screenSize.width;
            presentImageHeight = presentImageWidth * imageRatio;
        }

        presentImageRect.size = CGSizeMake(presentImageWidth, presentImageHeight);
        presentImageRect.origin = CGPointMake((screenSize.width-presentImageWidth)/2.0, (screenSize.height-presentImageHeight)/2.0);

    }else{
        NSAssert(0, @"unsupported videoGravity: %@", gravity);
    }

    scale = CGRectGetWidth(presentImageRect) / imageSize.width;

    // Shift the on-screen crop rect into the presented image's coordinate
    // space, then divide by the scale to get image coordinates.
    CGRect rect = cropRect;
    rect.origin = CGPointMake(CGRectGetMinX(cropRect)-CGRectGetMinX(presentImageRect), CGRectGetMinY(cropRect)-CGRectGetMinY(presentImageRect));

    rect.origin.x /= scale;
    rect.origin.y /= scale;
    rect.size.width /= scale;
    rect.size.height /= scale;

    return rect;
}
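// Worked example with assumed numbers (aspect-fit): a 480x640 image presented
// in a 320x568 preview. screenRatio = 568/320 = 1.775 is greater than
// imageRatio = 640/480 = 1.333, so the image is letterboxed vertically:
// presentImageRect = (0, 70.7, 320, 426.7) and scale = 320/480 = 0.667.
// A cropRect with origin (60, 200) on screen therefore maps to
// ((60-0)/0.667, (200-70.7)/0.667) = (90, 194) in image coordinates.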

#define SUBSET_SIZE 360

- (UIImage*) cropImageInRect:(UIImage*)image{

    CGSize size = [image size];
    CGRect cropRect = [self calcRect:size];

    // Scale down (never up) so the smaller crop dimension becomes SUBSET_SIZE.
    float scale = fminf(1.0f, fmaxf(SUBSET_SIZE / cropRect.size.width, SUBSET_SIZE / cropRect.size.height));
    CGPoint offset = CGPointMake(-cropRect.origin.x, -cropRect.origin.y);

    size_t subsetWidth = cropRect.size.width * scale;
    size_t subsetHeight = cropRect.size.height * scale;


    CGColorSpaceRef grayColorSpace = CGColorSpaceCreateDeviceGray();

    CGContextRef ctx =
    CGBitmapContextCreate(nil,
                          subsetWidth,
                          subsetHeight,
                          8,
                          0,
                          grayColorSpace,
                          kCGImageAlphaNone|kCGBitmapByteOrderDefault);
    CGColorSpaceRelease(grayColorSpace);
    CGContextSetInterpolationQuality(ctx, kCGInterpolationNone);
    CGContextSetAllowsAntialiasing(ctx, false);

    // adjust the coordinate system
    CGContextTranslateCTM(ctx, 0.0, subsetHeight);
    CGContextScaleCTM(ctx, 1.0, -1.0);


    UIGraphicsPushContext(ctx);
    CGRect rect = CGRectMake(offset.x * scale, offset.y * scale, scale * size.width, scale * size.height);

    [image drawInRect:rect];

    UIGraphicsPopContext();

    CGContextFlush(ctx);


    CGImageRef subsetImageRef = CGBitmapContextCreateImage(ctx);

    UIImage* subsetImage = [UIImage imageWithCGImage:subsetImageRef];

    CGImageRelease(subsetImageRef);

    CGContextRelease(ctx);


    return subsetImage;
}


- (void) start{

    dispatch_sync(self.camera_sample_queue, ^{
        [self.session startRunning];
    });

}
- (void) stop{
    if(self.session){
        [self.session stopRunning];
    }

}


@end
</pre>
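
To round things off, here is a minimal usage sketch of my own (not from the original post), assuming the cropRect property is declared in ScanView.h and the code runs inside a host view controller:

<pre name="code" class="objc">// Hypothetical host code, for illustration only.
ScanView* scanView = [[ScanView alloc] initWithFrame:self.view.bounds];
[self.view addSubview:scanView];
scanView.cropRect = CGRectMake(60, 200, 200, 200); // the on-screen "hole"
[scanView start];

// ... and later, e.g. in viewWillDisappear:
[scanView stop];
</pre>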