在iOS7之前,大部分应用中使用的二维码扫描是第三方的扫描框架,例如ZXing或者ZBar。使用时集成麻烦,出错也不方便调试。在iOS7之后,苹果自身提供了二维码的扫描功能,从识别效率上来说,原生API远高于这些第三方框架。
二维码扫描需要获取摄像头并读取照片信息,因此我们需要导入系统的AVFoundation框架。
我们需要用到以下几个类:
AVCaptureSession 会话对象,此类作为硬件设备输入输出信息的桥梁,承担实时获取设备数据的责任。
AVCaptureDeviceInput 设备输入类,这个类用来表示输入数据的硬件设备。
AVCaptureMetadataOutput 输出类,这个类支持二维码、条形码等图像数据的识别。
AVCaptureVideoPreviewLayer 图层类,用来快速呈现摄像头获取的原始数据。
二维码扫描的步骤:
1、创建设备会话对象,用来设置设备数据输入
2、获取摄像头,并且将摄像头对象加入当前会话中
3、实时获取摄像头原始数据显示在屏幕上
4、扫描到二维码/条形码数据,通过协议方法回调
扫描范围:
在AVCaptureMetadataOutput中有一个叫做rectOfInterest的CGRect类型属性,用来限制扫描范围。它的每个分量取值范围在0~1之间,代表对应轴上的比例大小。注意它不使用常规的屏幕坐标系:坐标原点在屏幕右上角,x与y、宽与高的顺序都要对换,即按(y, x, height, width)的比例来填写,如下图所示。更稳妥的做法是调用AVCaptureVideoPreviewLayer的metadataOutputRectOfInterestForRect:方法,把屏幕上的扫描框矩形直接转换成正确的rectOfInterest值。
注意:需要在info.plist 文件中添加使用相机权限
<key>NSCameraUsageDescription</key>
<string>是否允许使用相机进行拍照或扫码</string>
示例代码
#define Screen_Width [UIScreen mainScreen].bounds.size.width
#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
//Private class extension: scan UI state and AVFoundation plumbing.
@interface ViewController ()<AVCaptureMetadataOutputObjectsDelegate>
{
UIView* scanBgView;//gray container view that hosts the camera preview layer
UILabel *stateLabel;//label that shows the decoded QR-code payload
UIButton *controlBtn;//button toggling between start and stop scanning
UIView* boxView;//scan frame (green-bordered view marking the scan area)
CALayer* scanLayer;//scan line animated inside the scan frame
BOOL isReading;//whether a scan session is currently running
NSTimer* timer;//drives the scan-line animation; retains self until invalidated
}
//Session object acting as the bridge between the hardware input and the
//metadata output; delivers device data in real time.
@property (nonatomic, strong) AVCaptureSession *captureSession;
//Preview layer used to render the raw camera feed on screen.
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@end
@implementation ViewController
//Build the capture pipeline (camera -> session -> metadata output),
//install the preview layer, scan frame and scan line, and start scanning.
//Safe to call repeatedly: bails out if a scan is already running.
-(void)startReading{
    //Reentrancy guard: do not stack a second pipeline on a running one.
    if (isReading) {
        return;
    }
    NSError *error;
    //Default video capture device (the camera). This is nil on hardware
    //without a camera, e.g. the iOS simulator — guard before use.
    AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (!captureDevice) {
        NSLog(@"❌no video capture device available");
        return;
    }
    //Wrap the camera in a device input. Per Cocoa convention, check the
    //return value (not the error pointer) to decide success.
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
    if (!input) {
        NSLog(@"❌%@", [error localizedDescription]);
        return;
    }
    //Metadata output: recognizes QR codes, barcodes and similar symbologies.
    AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
    //Deliver delegate callbacks on a private serial queue so recognition
    //work never blocks the main thread.
    dispatch_queue_t dispatchQueue = dispatch_queue_create("myQueue", NULL);
    [captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatchQueue];
    //Session bridging the camera input and the metadata output.
    _captureSession = [[AVCaptureSession alloc] init];
    if ([_captureSession canAddInput:input]) {
        [_captureSession addInput:input];
    }
    if ([_captureSession canAddOutput:captureMetadataOutput]) {
        [_captureSession addOutput:captureMetadataOutput];
    }
    //Restrict recognition to QR codes. Must be set AFTER the output has
    //been added to the session, otherwise the type is rejected.
    [captureMetadataOutput setMetadataObjectTypes:[NSArray arrayWithObject:AVMetadataObjectTypeQRCode]];
    //Limit scanning to the scan frame. rectOfInterest uses normalized
    //(0..1) coordinates with x/y and width/height swapped: (y, x, h, w).
    //The scan frame spans 20%..80% of each axis, so the size component is
    //0.6 — the previous 0.8 extended past the visible box (0.2 + 0.8 = 1.0
    //covers everything from 20% to the far edge, not the framed area).
    //NOTE(review): with AVLayerVideoGravityResizeAspectFill these ratios
    //are approximate; metadataOutputRectOfInterestForRect: on the preview
    //layer is the exact conversion — confirm on device.
    captureMetadataOutput.rectOfInterest = CGRectMake(0.2f, 0.2f, 0.6f, 0.6f);
    //Preview layer rendering the raw camera feed inside scanBgView.
    _videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    [_videoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    [_videoPreviewLayer setFrame:scanBgView.layer.bounds];
    [scanBgView.layer addSublayer:_videoPreviewLayer];
    //Scan frame. Drop any frame left over from a previous run first, so
    //repeated start/stop cycles do not stack overlapping border views
    //(messaging nil is a harmless no-op on the very first run).
    [boxView removeFromSuperview];
    boxView = [[UIView alloc] initWithFrame:CGRectMake(scanBgView.bounds.size.width * 0.2f, scanBgView.bounds.size.height * 0.2f, scanBgView.bounds.size.width * 0.6f, scanBgView.bounds.size.height * 0.6f)];
    boxView.layer.borderColor = [UIColor greenColor].CGColor;
    boxView.layer.borderWidth = 1.0f;
    [scanBgView addSubview:boxView];
    //Scan line: a 1pt-tall layer the timer animates down the frame.
    scanLayer = [[CALayer alloc] init];
    scanLayer.frame = CGRectMake(0, 0, boxView.bounds.size.width, 1);
    scanLayer.backgroundColor = [UIColor brownColor].CGColor;
    [boxView.layer addSublayer:scanLayer];
    //Timer driving the scan-line animation. A target-based NSTimer retains
    //self; stopReading invalidates it to break that cycle.
    timer = [NSTimer scheduledTimerWithTimeInterval:0.2f target:self selector:@selector(moveScanLayer) userInfo:nil repeats:YES];
    [timer fire];
    //Start delivering frames.
    [_captureSession startRunning];
    isReading = YES;
}
//Timer callback: advance the scan line down the scan frame, wrapping
//back to the top once it has passed the bottom edge.
- (void)moveScanLayer{
    CGRect lineFrame = scanLayer.frame;
    if (lineFrame.origin.y <= boxView.bounds.size.height) {
        //Still inside the frame: slide down by 5 points, animated.
        lineFrame.origin.y += 5;
        [UIView animateWithDuration:0.1 animations:^{
            scanLayer.frame = lineFrame;
        }];
    } else {
        //Past the bottom edge: jump back to the top without animation.
        lineFrame.origin.y = 0;
        scanLayer.frame = lineFrame;
    }
}
//Stop scanning and tear down the capture pipeline and scan UI.
-(void)stopReading{
    //Invalidate the timer first: a target-based NSTimer retains self, and
    //stopping it here both breaks that cycle and halts the scan-line
    //animation immediately.
    [timer invalidate];
    timer = nil;
    //Stop the capture session and release it.
    [_captureSession stopRunning];
    _captureSession = nil;
    isReading = NO;
    //Remove the preview layer and the scan line.
    [_videoPreviewLayer removeFromSuperlayer];
    [scanLayer removeFromSuperlayer];
    //Also remove the scan frame: startReading creates a fresh boxView on
    //every run, so leaving the old one in place would stack green-border
    //views on top of each other after each start/stop cycle.
    [boxView removeFromSuperview];
    boxView = nil;
}
//Build the static UI — preview container, status label, toggle button —
//then start the first scan immediately.
- (void)viewDidLoad {
    [super viewDidLoad];
    //Gray container that hosts the camera preview layer.
    scanBgView = [[UIView alloc] initWithFrame:CGRectMake(0, 120, Screen_Width, 250)];
    scanBgView.backgroundColor = [UIColor lightGrayColor];
    //Label centered in the container; displays the decoded payload.
    stateLabel = [[UILabel alloc] initWithFrame:CGRectMake((Screen_Width - 300) / 2, (250 - 18) / 2, 300, 18)];
    stateLabel.backgroundColor = [UIColor clearColor];
    stateLabel.font = [UIFont systemFontOfSize:12];
    stateLabel.textAlignment = NSTextAlignmentCenter;
    //Start/stop toggle button placed below the container.
    CGRect buttonFrame = CGRectMake((Screen_Width - 100) / 2, CGRectGetMaxY(scanBgView.frame) + 20, 100, 18);
    controlBtn = [[UIButton alloc] initWithFrame:buttonFrame];
    controlBtn.backgroundColor = [UIColor clearColor];
    controlBtn.titleLabel.font = [UIFont systemFontOfSize:15];
    [controlBtn setTitleColor:[UIColor blackColor] forState:UIControlStateNormal];
    [controlBtn setTitle:@"暂停扫描" forState:UIControlStateNormal];
    [controlBtn addTarget:self action:@selector(startOrStopReading:) forControlEvents:UIControlEventTouchUpInside];
    //Assemble the view hierarchy.
    [self.view addSubview:scanBgView];
    [self.view addSubview:controlBtn];
    [scanBgView addSubview:stateLabel];
    //Begin scanning right away.
    [self startReading];
}
//Button action: toggle between scanning and idle, updating the button
//title to reflect the action the next tap will perform.
- (void)startOrStopReading:(UIButton*)sender {
    if (isReading) {
        [self stopReading];
        [controlBtn setTitle:@"开始扫描" forState:UIControlStateNormal];
    } else {
        [self startReading];
        [controlBtn setTitle:@"暂停扫描" forState:UIControlStateNormal];
        [stateLabel setText:@""];
    }
}
#pragma mark - AVCaptureMetadataOutputObjectsDelegate
//Delegate callback, invoked on the private serial metadata queue whenever
//the output recognizes one or more machine-readable codes. Shows the
//decoded payload and stops scanning.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection{
    //firstObject is nil-safe: it returns nil for an empty or nil array,
    //replacing the explicit count check + objectAtIndex:0.
    AVMetadataMachineReadableCodeObject *metadataObj = [metadataObjects firstObject];
    //Only QR codes were registered via metadataObjectTypes, but re-check
    //defensively before reading the payload (a nil receiver fails the
    //check harmlessly).
    if ([[metadataObj type] isEqualToString:AVMetadataObjectTypeQRCode]) {
        NSString *payload = [metadataObj stringValue];
        //Hop to the main queue: UIKit and session teardown must not run on
        //the background metadata queue. dispatch_async replaces the
        //type-unsafe performSelectorOnMainThread: calls.
        dispatch_async(dispatch_get_main_queue(), ^{
            stateLabel.text = payload;
            [self stopReading];
        });
    }
}
@end