#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>

// Class extension: private capture state for the QR scanner.
@interface ViewController () <AVCaptureMetadataOutputObjectsDelegate>

// The capture "session" — scanning is active while this session is running.
@property (nonatomic, strong) AVCaptureSession *session;
// On-screen viewfinder for the camera feed; without it the screen stays black.
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *preview;

@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    self.title = @"掃一掃"; // user-facing title ("Scan") — runtime string, kept as-is
    [self initQrCodeScanning];
    // Do any additional setup after loading the view, typically from a nib.
}

/// Builds the full QR-scanning pipeline:
/// camera device -> device input -> session -> metadata output (QR only) -> preview layer,
/// then starts the session. Decoded strings are delivered through the
/// AVCaptureMetadataOutputObjectsDelegate callback below.
/// NOTE(review): no camera-permission check here — on first launch iOS prompts
/// automatically, but a denied state silently yields a black preview; consider
/// checking +[AVCaptureDevice authorizationStatusForMediaType:].
- (void)initQrCodeScanning {
    self.session = [[AVCaptureSession alloc] init];
    // [self.session setSessionPreset:AVCaptureSessionPresetHigh]; // capture quality

    // Default (back) camera. On the simulator this is nil and input creation fails.
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];

    // FIX: abort setup when the camera input cannot be created or attached,
    // instead of continuing to configure a session that can never produce frames.
    if (input == nil || ![self.session canAddInput:input]) {
        NSLog(@"%@", error);
        return;
    }
    [self.session addInput:input];

    // The metadata output parses machine-readable codes out of the video frames.
    AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];
    if (![self.session canAddOutput:output]) {
        // FIX: the original fell through and still set metadataObjectTypes on an
        // unattached output, which raises (requested types must be a subset of
        // availableMetadataObjectTypes — empty until the output joins a session).
        NSLog(@"Cannot add metadata output to session");
        return;
    }
    [self.session addOutput:output];

    // Restrict recognition to QR codes (barcode types could be added here).
    // Must be set AFTER -addOutput:, once availableMetadataObjectTypes is populated.
    output.metadataObjectTypes = @[AVMetadataObjectTypeQRCode];

    // Results are delivered to the delegate callback on the main queue.
    [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];

    // Viewfinder: starting the session without a preview layer shows a black screen.
    self.preview = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.preview.videoGravity = AVLayerVideoGravityResize;
    [self.preview setFrame:self.view.bounds]; // viewfinder fills the whole view
    [self.view.layer insertSublayer:self.preview atIndex:0];

    // Limit scanning to a 220x220 pt square, 124 pt from the top, horizontally
    // centered. rectOfInterest uses the metadata output's normalized, ROTATED
    // coordinate space — (y/H, x/W, h/H, w/W) of the screen rect — hence the
    // swapped axes. (FIX: hoisted the 4x repeated [[UIScreen mainScreen] bounds]
    // into locals and named the magic numbers; values are unchanged.)
    CGSize screenSize = [[UIScreen mainScreen] bounds].size;
    const CGFloat kScanAreaSide = 220.0;
    const CGFloat kScanAreaTop = 124.0;
    output.rectOfInterest = CGRectMake(kScanAreaTop / screenSize.height,
                                       ((screenSize.width - kScanAreaSide) / 2.0) / screenSize.width,
                                       kScanAreaSide / screenSize.height,
                                       kScanAreaSide / screenSize.width);

    // Start scanning. NOTE(review): -startRunning blocks until the session is
    // up; Apple recommends invoking it off the main thread.
    [self.session startRunning];
}

#pragma mark - AVCaptureMetadataOutputObjectsDelegate

/// Delivered on the main queue whenever the output decodes metadata objects.
/// One-shot behavior: the session stops and the viewfinder is torn down after
/// the first result, so re-scanning requires rebuilding the pipeline.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection{
    [self.session stopRunning];          // stop the capture session
    [self.preview removeFromSuperlayer]; // remove the viewfinder
    // FIX: nil-safe -firstObject plus an isKindOfClass: check before the cast —
    // the original indexed [0] and cast unconditionally.
    AVMetadataObject *first = metadataObjects.firstObject;
    if ([first isKindOfClass:[AVMetadataMachineReadableCodeObject class]]) {
        NSString *result = [(AVMetadataMachineReadableCodeObject *)first stringValue]; // the decoded payload
        NSLog(@"outPut result == %@", result);
    }
}