System QR code scanning: setting the scan area with rectOfInterest

 

#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <CoreImage/CoreImage.h>   // only needed for the (unused) CIDetector property below

@interface ViewController () <AVCaptureMetadataOutputObjectsDelegate>

// Unused in this sample; CIDetector is the Core Image alternative for detecting
// QR codes in still images rather than in a live capture session.
@property (nonatomic, strong) CIDetector *detecter;

@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];

    // Capture device, input and metadata output
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
    AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];

    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    [session addInput:input];
    [session addOutput:output];
    [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    // Set the metadata types only after the output has been added to the session,
    // otherwise AVMetadataObjectTypeQRCode is not yet available.
    [output setMetadataObjectTypes:@[AVMetadataObjectTypeQRCode]];
    // Ratio-based scan area: CGRectMake(y/H, x/W, height/H, width/W), where W and H
    // are the preview layer's width and height (here the full-screen view). This value
    // is replaced again below once the session is ready.
    [output setRectOfInterest:CGRectMake(100/self.view.frame.size.height, 50/self.view.frame.size.width, 300/self.view.frame.size.height, 400/self.view.frame.size.width)];

    AVCaptureVideoPreviewLayer *layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    [layer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    [layer setFrame:CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height)];
    [self.view.layer addSublayer:layer];
    [session startRunning];

    // Red-bordered view marking the scan area on screen; its frame is the same rect
    // that is passed to metadataOutputRectOfInterestForRect: below.
    UIView *view = [[UIView alloc] initWithFrame:CGRectMake(50, 100, 200, 200)];
    view.backgroundColor = [UIColor clearColor];
    view.layer.borderWidth = 1;
    view.layer.borderColor = [UIColor redColor].CGColor;
    [self.view addSubview:view];

    // Adjusting the scan area: some articles say to set rectOfInterest as
    //   CGRectMake(y/deviceHeight, x/deviceWidth, height/deviceHeight, width/deviceWidth);
    // i.e. x/y and width/height are swapped. Here x, y, width and height describe the
    // on-screen scan frame, and deviceWidth/deviceHeight are the width and height of the
    // AVCaptureVideoPreviewLayer. I tried this approach but ran into quite a few problems
    // and the results were not great (a helper applying this formula is sketched after
    // viewDidLoad below).
    // The approach below feels easier to reason about: you pass a normal frame, the same
    // way you have always set frames, and let the preview layer do the conversion.
    // metadataOutputRectOfInterestForRect: only gives a correct result once the session
    // knows its input format, so wait for this notification before doing the conversion.
    __weak typeof(self) weakSelf = self;
    [[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureInputPortFormatDescriptionDidChangeNotification
                                                      object:nil
                                                       queue:[NSOperationQueue mainQueue]
                                                  usingBlock:^(NSNotification * _Nonnull note) {
        if (weakSelf) {
            AVCaptureMetadataOutput *output = session.outputs.firstObject;
            // Convert the on-screen scan frame (the same rect as the red view above)
            // into the normalized, rotated coordinates that rectOfInterest expects.
            output.rectOfInterest = [layer metadataOutputRectOfInterestForRect:CGRectMake(50, 100, 200, 200)];
        }
    }];
}
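
For completeness, here is what the ratio formula mentioned above looks like wrapped in a helper. This is only a rough sketch under the same assumptions the formula makes (portrait orientation, preview layer covering the full screen); the method name scanRectOfInterestForFrame:inPreviewLayer: is made up for illustration, and as noted above the metadataOutputRectOfInterestForRect: conversion is usually the more reliable route.

// Sketch of the manual, ratio-based conversion: turns an on-screen scan frame into a
// normalized rectOfInterest value, assuming a portrait, full-screen preview layer.
- (CGRect)scanRectOfInterestForFrame:(CGRect)scanFrame inPreviewLayer:(AVCaptureVideoPreviewLayer *)previewLayer {
    CGFloat deviceWidth  = CGRectGetWidth(previewLayer.frame);
    CGFloat deviceHeight = CGRectGetHeight(previewLayer.frame);
    // rectOfInterest is normalized to [0, 1] and expressed in the rotated (landscape)
    // capture coordinate space, hence the swapped x/y and width/height.
    return CGRectMake(CGRectGetMinY(scanFrame) / deviceHeight,
                      CGRectGetMinX(scanFrame) / deviceWidth,
                      CGRectGetHeight(scanFrame) / deviceHeight,
                      CGRectGetWidth(scanFrame) / deviceWidth);
}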

 

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {

    // Check whether any metadata objects were returned
    if (metadataObjects != nil && [metadataObjects count] > 0) {
        AVMetadataMachineReadableCodeObject *metadataObj = [metadataObjects objectAtIndex:0];
        NSLog(@"=========%@", metadataObj.stringValue);
        // Check the type of the returned object
        if ([[metadataObj type] isEqualToString:AVMetadataObjectTypeQRCode]) {
            // Handle the result here, e.g. via a helper like the one sketched below
        }
    }
}
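
The empty branch above is where you would react to a successful scan. A minimal sketch of such a handler is shown below; the method name handleScannedString:stoppingSession: and the alert presentation are illustrative choices only, and the session has to be passed in somehow (for example by storing it in a property, which the listing above does not do since it keeps the session as a local in viewDidLoad).

// Sketch of handling a decoded QR code: stop the session so the same code is not
// reported over and over, then show the decoded string.
- (void)handleScannedString:(NSString *)stringValue stoppingSession:(AVCaptureSession *)session {
    [session stopRunning];
    UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"Scan result"
                                                                   message:stringValue
                                                            preferredStyle:UIAlertControllerStyleAlert];
    [alert addAction:[UIAlertAction actionWithTitle:@"OK" style:UIAlertActionStyleDefault handler:nil]];
    [self presentViewController:alert animated:YES completion:nil];
}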

 

@end