Without further ado, straight to the code; the comments carry the details. This is the fruit of everyone's collective wisdom.
1. Declare the header file
<p><code>
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <AudioToolbox/AudioToolbox.h>
typedef void (^YYScanSuccessBlock)(NSString *scanResult);
@interface YYScanHelper : NSObject
// Step 1: declare the singleton accessor
+ (instancetype)manager;
@property (nonatomic, strong) UIView *scanView;
@property (nonatomic, copy) YYScanSuccessBlock scanBlock;
// Start scanning
- (void)startRunning;
// Stop scanning
- (void)stopRunning;
// Show the scan preview on the given view
- (void)showLayer:(UIView *)superView;
// Set the scanning area (region of interest)
- (void)setScanningRect:(CGRect)scanRect scanView:(UIView *)scanView;
@end
</code></p>
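One prerequisite the header does not cover: camera access. Since iOS 10 the app must declare NSCameraUsageDescription in Info.plist, and it is worth checking authorization before starting a scan. A minimal sketch of such a check (calling into the helper inside the block is only an example, not part of YYScanHelper):
<p><code>
#import <AVFoundation/AVFoundation.h>
// Check and, if needed, request camera permission before scanning
switch ([AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo]) {
    case AVAuthorizationStatusAuthorized:
        [[YYScanHelper manager] startRunning];
        break;
    case AVAuthorizationStatusNotDetermined:
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            dispatch_async(dispatch_get_main_queue(), ^{
                if (granted) {
                    [[YYScanHelper manager] startRunning];
                }
            });
        }];
        break;
    default:
        NSLog(@"Camera access denied or restricted");
        break;
}
</code></p>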
2. Implementation
<p><code>
#import "YYScanHelper.h"
#import <AVFoundation/AVFoundation.h>
@interface YYScanHelper()<AVCaptureMetadataOutputObjectsDelegate>
{
// System objects needed for scanning
AVCaptureSession *_session;
AVCaptureVideoPreviewLayer *_layer;
AVCaptureMetadataOutput *_output;
AVCaptureDeviceInput *_input;
UIView *_superView;
}
@end
@implementation YYScanHelper
+ (instancetype)manager
{
// Non-strict singleton (init is still public)
static YYScanHelper *singleton = nil;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
singleton = [[YYScanHelper alloc] init];
});
return singleton;
}
- (id)init
{
self = [super init];
if (self) {
// Initialize the capture session
_session = [[AVCaptureSession alloc]init];
// High-quality capture preset
[_session setSessionPreset:AVCaptureSessionPresetHigh];
// Skip capture setup on the simulator to avoid a crash
if (!TARGET_IPHONE_SIMULATOR) {
// Get the camera device
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
// Create the device input
_input = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
// Add the input to the session
[_session addInput:_input];
// Create the metadata output
_output = [[AVCaptureMetadataOutput alloc]init];
// Add the output to the session
[_session addOutput:_output];
// Set the delegate; deliver callbacks on the main queue
[_output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
// Supported symbologies (QR code plus common barcode formats)
_output.metadataObjectTypes = @[AVMetadataObjectTypeQRCode,
AVMetadataObjectTypeEAN8Code,
AVMetadataObjectTypeEAN13Code,
AVMetadataObjectTypeCode128Code];
}
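// Create the preview layer that renders the camera feed, filling the view while keeping the aspect ratio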
_layer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_layer.videoGravity = AVLayerVideoGravityResizeAspectFill;
}
return self;
}
- (void)startRunning {
// Start capturing
[_session startRunning];
}
- (void)stopRunning {
// Stop capturing
[_session stopRunning];
}
#pragma mark - AVCaptureMetadataOutputObjectsDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
if (metadataObjects.count > 0) {
// Scan finished; stop the session
[_session stopRunning];
// Play a system sound (the sound ID can be changed)
AudioServicesPlaySystemSound(1200);
AVMetadataMachineReadableCodeObject *metadataObject = [metadataObjects objectAtIndex:0];
if (self.scanBlock) {
self.scanBlock(metadataObject.stringValue);
}
// Log the scanned string
NSLog(@"Scanned string: %@", metadataObject.stringValue);
}
}
/**
 *  Set the scanning region. rectOfInterest uses the camera's normalized coordinate space,
 *  i.e. CGRectMake(y origin / layer height, x origin / layer width,
 *  region height / layer height, region width / layer width).
 *
 *  @param scanRect scanning area, in the preview layer's coordinates
 *  @param scanView overlay view framing the scanning area
 */
- (void)setScanningRect:(CGRect)scanRect scanView:(UIView *)scanView
{
CGFloat x,y,width,height;
x = scanRect.origin.y / _layer.frame.size.height;
y = scanRect.origin.x / _layer.frame.size.width;
width = scanRect.size.height / _layer.frame.size.height;
height = scanRect.size.width / _layer.frame.size.width;
_output.rectOfInterest = CGRectMake(x, y, width, height);
self.scanView = scanView;
if (self.scanView) {
self.scanView.frame = scanRect;
if (_superView) {
[_superView addSubview:self.scanView];
}
}
}
/**
 *  Show the camera preview layer.
 *
 *  @param superView the view on which the preview should be displayed
 */
- (void)showLayer:(UIView *)superView
{
_superView = superView;
// Use bounds so the preview layer fills superView regardless of its origin
_layer.frame = superView.bounds;
[superView.layer insertSublayer:_layer atIndex:0];
}
@end
</code></p>
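A side note on the coordinate math in setScanningRect:scanView:. AVCaptureVideoPreviewLayer can do the same mapping with metadataOutputRectOfInterestForRect: (iOS 6+), which converts a rect in the layer's coordinates to the normalized space that rectOfInterest expects. A sketch of that variant, assuming the layer already has its final frame and the session has started (otherwise the result can be inaccurate):
<p><code>
// Alternative inside setScanningRect:scanView: -- let the preview layer do the conversion
CGRect interestRect = [_layer metadataOutputRectOfInterestForRect:scanRect];
_output.rectOfInterest = interestRect;
</code></p>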
3. Usage
<p><code>
[[YYScanHelper manager] showLayer:self.view];
[[YYScanHelper manager] setScanningRect:self.scanRect scanView:scanRectView];
[[YYScanHelper manager] setScanBlock:^(NSString *scanResult) {
    NSLog(@"Scan result: %@", scanResult);
}];
[[YYScanHelper manager] startRunning];
</code></p>
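For context, a typical host controller could wire everything up in viewDidLoad as sketched below. ScanViewController, the rect values, and handleScanResult: are placeholder names rather than part of YYScanHelper; self is captured weakly because the singleton retains scanBlock, and a strong capture would keep the controller alive.
<p><code>
// ScanViewController.m (hypothetical host controller)
- (void)viewDidLoad {
    [super viewDidLoad];
    // Example scan window: a centered square (values are arbitrary)
    CGFloat side = self.view.bounds.size.width - 120;
    CGRect scanRect = CGRectMake((self.view.bounds.size.width - side) / 2, 150, side, side);
    // Simple overlay marking the scan window
    UIView *scanRectView = [[UIView alloc] init];
    scanRectView.layer.borderColor = [UIColor greenColor].CGColor;
    scanRectView.layer.borderWidth = 1.0;
    // Show the preview first so the layer frame is available for the rect conversion
    [[YYScanHelper manager] showLayer:self.view];
    [[YYScanHelper manager] setScanningRect:scanRect scanView:scanRectView];
    __weak typeof(self) weakSelf = self;
    [[YYScanHelper manager] setScanBlock:^(NSString *scanResult) {
        [weakSelf handleScanResult:scanResult]; // hypothetical handler
    }];
    [[YYScanHelper manager] startRunning];
}
</code></p>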