1. There are quite a few third-party libraries for QR code scanning; two commonly used frameworks are:
1. ZBar SDK. ZBar offers two ways to use it: one is to directly present the ZBarReaderViewController it provides, which opens a ready-made scanning interface; the other is to use the ZBarReaderView it provides, which can be embedded in another view. In real projects the second approach is more common because it allows more customization of the UI; see the ZBar documentation for details, and the sketch after this list for the first approach.
2. ZXing (GitHub mirror) is an open-source barcode generation and scanning library (licensed under Apache 2.0). It supports many barcode formats and has implementations in several languages, including Java, C++, C#, Objective-C, ActionScript, and Ruby. First download the ZXing source from Google Code or GitHub; the project is fairly large and we only need the parts relevant to iOS, so it is best to trim it down. In short, keep only the cpp and iphone folders and delete everything else.
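For reference, here is a minimal sketch of the first ZBar approach (presenting ZBarReaderViewController directly). It assumes the ZBarSDK has been added to the project and that the presenting view controller adopts ZBarReaderDelegate; it is only an illustration, not part of the AVFoundation implementation below.

#import "ZBarSDK.h"

// Present ZBar's ready-made scanning interface
- (void)scanWithZBar
{
    ZBarReaderViewController *reader = [ZBarReaderViewController new];
    reader.readerDelegate = self;   // self must conform to ZBarReaderDelegate
    [self presentViewController:reader animated:YES completion:nil];
}

// ZBarReaderDelegate extends UIImagePickerControllerDelegate,
// so results come back through the image picker callback
- (void)imagePickerController:(UIImagePickerController *)reader
didFinishPickingMediaWithInfo:(NSDictionary *)info
{
    id<NSFastEnumeration> results = info[ZBarReaderControllerResults];
    for (ZBarSymbol *symbol in results) {
        NSLog(@"scanned: %@", symbol.data);   // the decoded string
        break;                                // just take the first result
    }
    [reader dismissViewControllerAnimated:YES completion:nil];
}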
2. Implementation
1. This article covers native iOS QR code scanning, using the AVFoundation framework that ships with iOS 7.
About AVFoundation: AVFoundation is a large foundational framework for working with time-based audiovisual media. You can use it to inspect, create, and edit media files, and to get input streams from devices and capture and play back video in real time. For a detailed introduction see Apple's documentation, About AV Foundation; this article only covers how to use AVFoundation to read QR codes.
2. To capture the camera's media stream we need an AVCaptureSession object to manage the input and output streams, and an AVCaptureVideoPreviewLayer object to display the preview. The classes involved are listed below, followed by a minimal sketch of how they are wired together.
1> AVCaptureSession manages the input (AVCaptureInput) and output (AVCaptureOutput) streams, and provides methods to start and stop the session.
2> AVCaptureDeviceInput is a subclass of AVCaptureInput that serves as the capture session's input; it is initialized with an AVCaptureDevice instance.
3> AVCaptureDevice represents a physical capture device such as the camera, and is used to configure low-level hardware settings such as the camera's autofocus mode.
4> AVCaptureMetadataOutput is a subclass of AVCaptureOutput that handles the capture session's output; the captured metadata objects are passed to a delegate.
5> AVCaptureMetadataOutputObjectsDelegate is the protocol that delegate implements; its callback is executed on the dispatch queue you specify.
6> AVCaptureVideoPreviewLayer is a subclass of CALayer that displays the captured camera output stream.
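Before the full implementation, here is a minimal sketch of how these objects fit together. It assumes `self` is a view controller that adopts AVCaptureMetadataOutputObjectsDelegate; error handling and the scanning UI are omitted.

// Minimal wiring of the capture pipeline (error handling and UI omitted)
AVCaptureSession *session = [[AVCaptureSession alloc] init];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
[session addInput:input];

AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];
[session addOutput:output];
// Delegate callbacks arrive on the queue passed here
[output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
// Must be set after the output has been added to the session
output.metadataObjectTypes = @[AVMetadataObjectTypeQRCode];

AVCaptureVideoPreviewLayer *preview = [AVCaptureVideoPreviewLayer layerWithSession:session];
preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
preview.frame = self.view.bounds;
[self.view.layer addSublayer:preview];

[session startRunning];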
/*
 Steps:
 1. Link AVFoundation.framework and import <AVFoundation/AVFoundation.h>
 2. Set up a view used to display the camera preview
 3. Create the AVCaptureSession and AVCaptureVideoPreviewLayer
 */

#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>

@interface ViewController () <AVCaptureMetadataOutputObjectsDelegate>

@property (strong, nonatomic) UIView *viewPreview;
@property (strong, nonatomic) UILabel *lblStatus;
@property (strong, nonatomic) UIButton *startBtn;

@property (strong, nonatomic) UIView *boxView;
@property (nonatomic) BOOL isReading;
@property (strong, nonatomic) CALayer *scanLayer;
// keep a reference to the timer so it can be invalidated in stopReading
@property (strong, nonatomic) NSTimer *scanTimer;

- (BOOL)startReading;
- (void)stopReading;

// capture session
@property (strong, nonatomic) AVCaptureSession *captureSession;
// preview layer
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *videoPreviewLayer;

@end


@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    self.captureSession = nil;
    _isReading = NO;
    [self initUI];
}

- (BOOL)startReading
{
    NSError *error;
    // 1. Get the capture device (AVCaptureDevice) for video
    AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // 2. Create the input from captureDevice
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
    if (!input) {
        NSLog(@"%@", [error localizedDescription]);
        return NO;
    }
    // 3. Create the metadata output
    AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
    // 4. Create the capture session
    _captureSession = [[AVCaptureSession alloc] init];
    // 4.1 Add the input to the session
    [_captureSession addInput:input];
    // 4.2 Add the metadata output to the session
    [_captureSession addOutput:captureMetadataOutput];
    // 5. Set the delegate; dispatching callbacks on the main queue lets us update the UI directly
    [captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    // 5.1 Restrict the output to the barcode types we support; availableMetadataObjectTypes is only
    //     populated after the output has been added to a session with an input
    NSMutableArray *array = [[NSMutableArray alloc] initWithCapacity:0];
    if ([captureMetadataOutput.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeQRCode]) {
        [array addObject:AVMetadataObjectTypeQRCode];
    }
    if ([captureMetadataOutput.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeEAN13Code]) {
        [array addObject:AVMetadataObjectTypeEAN13Code];
    }
    if ([captureMetadataOutput.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeEAN8Code]) {
        [array addObject:AVMetadataObjectTypeEAN8Code];
    }
    if ([captureMetadataOutput.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeCode128Code]) {
        [array addObject:AVMetadataObjectTypeCode128Code];
    }
    captureMetadataOutput.metadataObjectTypes = array;

    // 6. Create the preview layer with the session (initWithSessionWithNoConnection: would leave the
    //    layer without a video connection and nothing would be displayed)
    _videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    // 7. Set the video gravity
    [_videoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    // 8. Set the layer's frame
    [_videoPreviewLayer setFrame:_viewPreview.layer.bounds];
    // 9. Add the layer to the preview view's layer
    [_viewPreview.layer addSublayer:_videoPreviewLayer];
    // 10. Limit the scanning area; rectOfInterest is in the metadata output's normalized coordinate space,
    //     so this only approximates the green box drawn below (see the conversion snippet after this listing)
    captureMetadataOutput.rectOfInterest = CGRectMake(0.2f, 0.2f, 0.6f, 0.6f);
    // 10.1 The scanning box
    CGFloat width = _viewPreview.bounds.size.width;
    CGFloat height = _viewPreview.bounds.size.height;
    self.boxView = [[UIView alloc] initWithFrame:CGRectMake(width * 0.2f, height * 0.2f, width * 0.6f, height * 0.6f)];
    _boxView.layer.borderColor = [[UIColor greenColor] CGColor];
    _boxView.layer.borderWidth = 1.0f;
    [_viewPreview addSubview:_boxView];
    // 10.2 The scanning line, moved by a repeating timer
    self.scanLayer = [[CALayer alloc] init];
    _scanLayer.frame = CGRectMake(0, 0, _boxView.bounds.size.width, 1);
    _scanLayer.backgroundColor = [[UIColor brownColor] CGColor];
    [_boxView.layer insertSublayer:_scanLayer atIndex:0];
    self.scanTimer = [NSTimer scheduledTimerWithTimeInterval:0.2f target:self selector:@selector(moveScanLayer:) userInfo:nil repeats:YES];
    [_scanTimer fire];
    // 11. Start scanning
    [_captureSession startRunning];
    return YES;
}

- (void)stopReading
{
    [_captureSession stopRunning];
    _captureSession = nil;
    [_scanTimer invalidate];
    _scanTimer = nil;
    [_scanLayer removeFromSuperlayer];
    [_boxView removeFromSuperview];
    [_videoPreviewLayer removeFromSuperlayer];
}

#pragma mark -- Timer callback that moves the scanning line

- (void)moveScanLayer:(NSTimer *)timer
{
    CGRect frame = _scanLayer.frame;
    if (_boxView.frame.size.height < _scanLayer.frame.origin.y) {
        // wrap back to the top of the box
        frame.origin.y = 0;
        _scanLayer.frame = frame;
    } else {
        frame.origin.y += 5;
        [UIView animateWithDuration:0.1 animations:^{
            _scanLayer.frame = frame;
        }];
    }
}

#pragma mark -- Set up the UI

- (void)initUI
{
    self.viewPreview = [[UIView alloc] init];
    self.viewPreview.frame = CGRectMake(0, 0, self.view.bounds.size.width, self.view.bounds.size.height - 200);
    [self.view addSubview:_viewPreview];

    self.startBtn = [UIButton buttonWithType:UIButtonTypeSystem];
    _startBtn.frame = CGRectMake(100, self.view.bounds.size.height - 100, 175, 30);
    [_startBtn setTitle:@"Start" forState:UIControlStateNormal];
    [_startBtn addTarget:self action:@selector(startStopReading:) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:_startBtn];
}

#pragma mark -- Start / stop scanning

- (void)startStopReading:(UIButton *)btn
{
    if (!_isReading) {
        if ([self startReading]) {
            [_startBtn setTitle:@"Stop" forState:UIControlStateNormal];
        }
    } else {
        [self stopReading];
        [_startBtn setTitle:@"Start" forState:UIControlStateNormal];
    }
    _isReading = !_isReading;
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

#pragma mark -- AVCaptureMetadataOutputObjectsDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    if (metadataObjects.count > 0) {
        AVMetadataMachineReadableCodeObject *metadataObject = [metadataObjects firstObject];
        NSLog(@"................%@", metadataObject.stringValue);
        // Stop scanning once a code has been read
        [self performSelectorOnMainThread:@selector(stopReading) withObject:nil waitUntilDone:NO];
    }
}

- (BOOL)shouldAutorotate
{
    return NO;
}
@end
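A note on step 10: rectOfInterest is expressed in the metadata output's normalized, rotated coordinate space, not in view coordinates, so hard-coded values rarely line up exactly with the on-screen box. AVCaptureVideoPreviewLayer can do the conversion with metadataOutputRectOfInterestForRect:. A minimal sketch, assuming it is placed inside startReading after [_captureSession startRunning] and reuses the local captureMetadataOutput variable:

// _boxView.frame is in _viewPreview's coordinates, which match _videoPreviewLayer's
// coordinates because the layer fills _viewPreview. The conversion only returns
// sensible values once the video connection exists, i.e. after startRunning.
CGRect interest = [_videoPreviewLayer metadataOutputRectOfInterestForRect:_boxView.frame];
captureMetadataOutput.rectOfInterest = interest;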