//
//  AVCaptureViewController.m
//  Real-time video demo
//
//  Created by zhongfeng1 on 2017/2/16.
//  Copyright © 2017 zhongfeng. All rights reserved.
//
#import "AVCaptureViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>   // AudioServicesPlaySystemSound (shutter sound)
#import <AssetsLibrary/AssetsLibrary.h>
#import "LHSIDCardScaningView.h"
#import "IDInfo.h"
#import "excards.h"
//#import "IDInfoViewController.h"
#import "UIImage+Extend.h"
#import "RectManager.h"
#import "UIAlertController+Extend.h"

@interface AVCaptureViewController () <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureMetadataOutputObjectsDelegate>
// Capture device (camera)
@property (nonatomic, strong) AVCaptureDevice *device;
// AVCaptureSession coordinates the data flow between the input device and the outputs
@property (nonatomic, strong) AVCaptureSession *session;
// Output pixel format
@property (nonatomic, strong) NSNumber *outPutSetting;
// Video data output (frame stream)
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput;
// Metadata output (used for face detection)
@property (nonatomic, strong) AVCaptureMetadataOutput *metadataOutput;
// Preview layer
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
// Face-detection frame (the small portrait box on the overlay)
@property (nonatomic, assign) CGRect faceDetectionFrame;
// Dispatch queue for capture callbacks
@property (nonatomic, strong) dispatch_queue_t queue;
// Whether the torch is on
@property (nonatomic, assign, getter = isTorchOn) BOOL torchOn;
@end

@implementation AVCaptureViewController
#pragma mark - Simulator vs. real device
#if TARGET_IPHONE_SIMULATOR
// The simulator has no camera, so just show a "please test on a real device" alert.
- (void)viewDidLoad {
    [super viewDidLoad];
    self.navigationItem.title = @"扫描身份证";
    __weak __typeof__(self) weakSelf = self;
    UIAlertAction *okAction = [UIAlertAction actionWithTitle:@"返回" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
        [weakSelf dismissViewControllerAnimated:YES completion:nil];
        //[weakSelf.navigationController popViewControllerAnimated:YES];
    }];
    UIAlertController *alertC = [UIAlertController alertControllerWithTitle:@"模拟器没有摄像设备" message:@"请使用真机测试!!!" okAction:okAction cancelAction:nil];
    [self presentViewController:alertC animated:YES completion:nil];
}
#else
#pragma mark - Lazy loading
#pragma mark device
- (AVCaptureDevice *)device {
    if (_device == nil) {
        _device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        NSError *error = nil;
        if ([_device lockForConfiguration:&error]) {
            if ([_device isSmoothAutoFocusSupported]) { // smooth autofocus
                _device.smoothAutoFocusEnabled = YES;
            }
            if ([_device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) { // continuous autofocus
                _device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
            }
            if ([_device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) { // continuous auto exposure
                _device.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
            }
            if ([_device isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) { // continuous auto white balance
                _device.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
            }
//            NSError *error1;
//            CMTime frameDuration = CMTimeMake(1, 30); // the default is 30 frames per second
//            NSArray *supportedFrameRateRanges = [_device.activeFormat videoSupportedFrameRateRanges];
//            BOOL frameRateSupported = NO;
//            for (AVFrameRateRange *range in supportedFrameRateRanges) {
//                if (CMTIME_COMPARE_INLINE(frameDuration, >=, range.minFrameDuration) && CMTIME_COMPARE_INLINE(frameDuration, <=, range.maxFrameDuration)) {
//                    frameRateSupported = YES;
//                }
//            }
//
//            if (frameRateSupported && [self.device lockForConfiguration:&error1]) {
//                [_device setActiveVideoMaxFrameDuration:frameDuration];
//                [_device setActiveVideoMinFrameDuration:frameDuration];
////                [self.device unlockForConfiguration];
//            }
            [_device unlockForConfiguration];
        }
    }
    return _device;
}
#pragma mark outPutSetting
- (NSNumber *)outPutSetting {
    if (_outPutSetting == nil) {
        _outPutSetting = @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
    }
    return _outPutSetting;
}
#pragma mark metadataOutput
- (AVCaptureMetadataOutput *)metadataOutput {
    if (_metadataOutput == nil) {
        _metadataOutput = [[AVCaptureMetadataOutput alloc] init];
        [_metadataOutput setMetadataObjectsDelegate:self queue:self.queue];
    }
    return _metadataOutput;
}
#pragma mark videoDataOutput
- (AVCaptureVideoDataOutput *)videoDataOutput {
    if (_videoDataOutput == nil) {
        _videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
        _videoDataOutput.alwaysDiscardsLateVideoFrames = YES;
        _videoDataOutput.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : self.outPutSetting};
        [_videoDataOutput setSampleBufferDelegate:self queue:self.queue];
    }
    return _videoDataOutput;
}
#pragma mark session
- (AVCaptureSession *)session {
    if (_session == nil) {
        _session = [[AVCaptureSession alloc] init];
        _session.sessionPreset = AVCaptureSessionPresetHigh;
        // Set up the input. The simulator has no camera, so guard against a missing device.
        NSError *error = nil;
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:&error];
        if (error) {
            UIAlertAction *okAction = [UIAlertAction actionWithTitle:@"确定" style:UIAlertActionStyleDefault handler:nil];
            [self alertControllerWithTitle:@"没有摄像设备" message:error.localizedDescription okAction:okAction cancelAction:nil];
        } else {
            if ([_session canAddInput:input]) {
                [_session addInput:input];
            }
            if ([_session canAddOutput:self.videoDataOutput]) {
                [_session addOutput:self.videoDataOutput];
            }
            if (_cardType == 0) {
                if ([_session canAddOutput:self.metadataOutput]) {
                    [_session addOutput:self.metadataOutput];
                    // metadataObjectTypes must be set after addOutput:, otherwise the app crashes
                    self.metadataOutput.metadataObjectTypes = @[AVMetadataObjectTypeFace];
                }
            }
        }
    }
    return _session;
}
#pragma mark previewLayer
- (AVCaptureVideoPreviewLayer *)previewLayer {
    if (_previewLayer == nil) {
        _previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
        _previewLayer.frame = self.view.frame;
        _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    }
    return _previewLayer;
}
#pragma mark queue
- (dispatch_queue_t)queue {
    if (_queue == nil) {
//        _queue = dispatch_queue_create("AVCaptureSession_Start_Running_Queue", DISPATCH_QUEUE_SERIAL);
        // Note: Apple's documentation requires the queue passed to setSampleBufferDelegate:queue: to be a
        // serial queue, so the commented-out serial queue above is the safer choice than a global concurrent queue.
        _queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
    }
    return _queue;
}
#pragma mark - Start the session
// Starting the session begins the data flow from the input device to the outputs
- (void)runSession {
    if (![self.session isRunning]) {
        dispatch_async(self.queue, ^{
            [self.session startRunning];
        });
    }
}
#pragma mark - Stop the session
// Stopping the session ends the data flow from the input device to the outputs
- (void)stopSession {
    if ([self.session isRunning]) {
        dispatch_async(self.queue, ^{
            [self.session stopRunning];
        });
    }
}
#pragma mark - Turn the torch on/off
- (void)turnOnOrOffTorch {
    self.torchOn = !self.isTorchOn;
    if ([self.device hasTorch]) { // check that the device actually has a torch
        if ([self.device lockForConfiguration:nil]) { // request exclusive access to the hardware
            if (self.isTorchOn) {
                self.navigationItem.rightBarButtonItem.image = [[UIImage imageNamed:@"nav_torch_on"] originalImage];
                [self.device setTorchMode:AVCaptureTorchModeOn];
            } else {
                self.navigationItem.rightBarButtonItem.image = [[UIImage imageNamed:@"nav_torch_off"] originalImage];
                [self.device setTorchMode:AVCaptureTorchModeOff];
            }
            [self.device unlockForConfiguration]; // release exclusive access
        }
    } else {
        UIAlertAction *okAction = [UIAlertAction actionWithTitle:@"确定" style:UIAlertActionStyleDefault handler:nil];
        [self alertControllerWithTitle:@"提示" message:@"您的设备没有闪光设备,不能提供手电筒功能,请检查" okAction:okAction cancelAction:nil];
    }
}
#pragma mark - View will appear
- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    // Make this controller's navigation bar transparent
    [[[self.navigationController.navigationBar subviews] objectAtIndex:0] setAlpha:0];
//    [self.navigationController.navigationBar setTitleTextAttributes:@{NSForegroundColorAttributeName:[UIColor whiteColor]}];
    // Re-check camera permission every time this controller is shown
    [self checkAuthorizationStatus];
    // Reset the rightBarButtonItem (torch icon) to its initial state
    self.torchOn = NO;
    self.navigationItem.rightBarButtonItem.image = [[UIImage imageNamed:@"nav_torch_off"] originalImage];
}
#pragma mark - View will disappear
- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    // Restore the navigation bar to opaque
    [[[self.navigationController.navigationBar subviews] objectAtIndex:0] setAlpha:1];
//    [self.navigationController.navigationBar setTitleTextAttributes:@{NSForegroundColorAttributeName:[UIColor blackColor]}];
    [self stopSession];
}
- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    self.navigationItem.title = @"扫描身份证";
    // Initialize the recognition engine (EXCARDS) with the app bundle's resource path
    const char *thePath = [[[NSBundle mainBundle] resourcePath] UTF8String];
    int ret = EXCARDS_Init(thePath);
    if (ret != 0) {
        NSLog(@"初始化失败:ret=%d", ret);
    }
    // Add the preview layer
    [self.view.layer addSublayer:self.previewLayer];
    // Add the custom scanning overlay (a cut-out window with a scan line moving back and forth)
    LHSIDCardScaningView *IDCardScaningView = [[LHSIDCardScaningView alloc] initWithFrame:self.view.frame];
    IDCardScaningView.cardType = _cardType;
    [IDCardScaningView loadImage];
    self.faceDetectionFrame = IDCardScaningView.facePathRect;
    [self.view addSubview:IDCardScaningView];
    // Set up the face-detection region
    /*
     Why do face detection at all?
     In practice, because the preview layer is full screen, the ID information can be read successfully even when the user has not aligned the card with the edges of the capture frame, so an incomplete card image may be captured.
     To capture a reasonably complete card image, a small portrait box is drawn at the appropriate spot on the custom overlay, and the user is asked to align the portrait on the card with that box.
     When a face is detected, its region is compared with the small box. Only if the face region lies inside the box has the user really placed the card's portrait in it; at that moment the frame contains the whole card at a suitable size, so only then is the frame captured (otherwise it is not).
     In short: the face on the card is detected to obtain the face region, and the face region is checked against the small box so that only complete card images are captured.
     Personal opinion: with the text prompt and the capture frame, 99% of users will align the card with the frame on their own, so a complete card image can be obtained even without the face-region check...
     ps: If you do not want the face-detection feature, your app does not need this level of precision, or you also want to read the back of the card (issuing authority, validity period), do the following:
     1. Comment out all metadataOutput code and the delegate method below it (-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection).
     2. Add [_videoDataOutput setSampleBufferDelegate:self queue:self.queue]; inside the if (_videoDataOutput == nil) {} block of the videoDataOutput lazy getter.
     3. Comment out the following lines in the AVCaptureVideoDataOutputSampleBufferDelegate method:
        if (self.videoDataOutput.sampleBufferDelegate) {
            [self.videoDataOutput setSampleBufferDelegate:nil queue:self.queue];
        }
     4. Run the app; both the front and the back of the ID card will be detected. Check the logged output.
     */
//    [[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureInputPortFormatDescriptionDidChangeNotification object:nil queue:[NSOperationQueue currentQueue] usingBlock:^(NSNotification * _Nonnull note) {
//        __weak __typeof__(self) weakSelf = self;
//        self.metadataOutput.rectOfInterest = [self.previewLayer metadataOutputRectOfInterestForRect:IDCardScaningView.facePathRect];
//    }];
    // Add the close button
    [self addCloseButton];
    // Add the rightBarButtonItem that toggles the torch
    self.navigationItem.rightBarButtonItem = [[UIBarButtonItem alloc] initWithTitle:nil style:UIBarButtonItemStylePlain target:self action:@selector(turnOnOrOffTorch)];
}
#pragma mark - Add the close button
- (void)addCloseButton {
    UIButton *closeBtn = [UIButton buttonWithType:UIButtonTypeCustom];
    [closeBtn setImage:[UIImage imageNamed:@"common_icon_close"] forState:UIControlStateNormal];
    CGFloat closeBtnWidth = 40;
    CGFloat closeBtnHeight = closeBtnWidth;
    CGRect viewFrame = self.view.frame;
    closeBtn.frame = (CGRect){CGRectGetMaxX(viewFrame) - closeBtnWidth - 15, CGRectGetMaxY(viewFrame) - closeBtnHeight - 20, closeBtnWidth, closeBtnHeight};
    [closeBtn addTarget:self action:@selector(close) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:closeBtn];
}
#pragma mark Action for the close button
- (void)close {
    [self dismissViewControllerAnimated:YES completion:nil];
    //[self.navigationController popViewControllerAnimated:YES];
}
#pragma mark - Check camera permission
- (void)checkAuthorizationStatus {
    AVAuthorizationStatus authorizationStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    switch (authorizationStatus) {
        case AVAuthorizationStatusNotDetermined: [self showAuthorizationNotDetermined]; break; // the user has not decided yet, so request authorization
        case AVAuthorizationStatusAuthorized:    [self showAuthorizationAuthorized];    break; // already authorized, start capturing immediately
        case AVAuthorizationStatusDenied:        [self showAuthorizationDenied];        break; // the user explicitly denied access, show a prompt
        case AVAuthorizationStatusRestricted:    [self showAuthorizationRestricted];    break; // camera access is restricted, show a prompt
    }
}
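// Note: on iOS 10 and later, requesting camera access also requires a usage description in the app's
// Info.plist, otherwise the app is terminated when the permission prompt would be shown. A minimal entry
// (the description string below is only an illustrative placeholder):
//
//     <key>NSCameraUsageDescription</key>
//     <string>Used to scan ID cards</string>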
#pragma mark - Camera permission handling
#pragma mark The user has not decided whether to grant camera access
- (void)showAuthorizationNotDetermined {
    __weak __typeof__(self) weakSelf = self;
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
        granted ? [weakSelf runSession] : [weakSelf showAuthorizationDenied];
    }];
}
#pragma mark Camera access granted
- (void)showAuthorizationAuthorized {
    [self runSession];
}
#pragma mark Camera access denied
- (void)showAuthorizationDenied {
    NSString *title = @"相机未授权";
    NSString *message = @"请到系统的“设置-隐私-相机”中授权此应用使用您的相机";
    UIAlertAction *okAction = [UIAlertAction actionWithTitle:@"去设置" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
        // Jump to this app's page in the system privacy settings
        [[UIApplication sharedApplication] openURL:[NSURL URLWithString:UIApplicationOpenSettingsURLString]];
    }];
    UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:@"取消" style:UIAlertActionStyleDefault handler:nil];
    [self alertControllerWithTitle:title message:message okAction:okAction cancelAction:cancelAction];
}
#pragma mark Camera access restricted
- (void)showAuthorizationRestricted {
    NSString *title = @"相机设备受限";
    NSString *message = @"请检查您的手机硬件或设置";
    UIAlertAction *okAction = [UIAlertAction actionWithTitle:@"确定" style:UIAlertActionStyleDefault handler:nil];
    [self alertControllerWithTitle:title message:message okAction:okAction cancelAction:nil];
}
#pragma mark - Present a UIAlertController
- (void)alertControllerWithTitle:(NSString *)title message:(NSString *)message okAction:(UIAlertAction *)okAction cancelAction:(UIAlertAction *)cancelAction {
    UIAlertController *alertController = [UIAlertController alertControllerWithTitle:title message:message okAction:okAction cancelAction:cancelAction];
    [self presentViewController:alertController animated:YES completion:nil];
}
#pragma mark - AVCaptureMetadataOutputObjectsDelegate
#pragma mark Catch faces in the output metadata
// Face detection provides the face region; that region is compared with the portrait box on the overlay, and only when the face lies inside the box is a frame captured, so the card image is complete.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    if (metadataObjects.count) {
        AVMetadataObject *metadataObject = metadataObjects.firstObject;
        AVMetadataObject *transformedMetadataObject = [self.previewLayer transformedMetadataObjectForMetadataObject:metadataObject];
        CGRect faceRegion = transformedMetadataObject.bounds;
        if ([metadataObject.type isEqualToString:AVMetadataObjectTypeFace]) {
            NSLog(@"是否包含头像:%d, facePathRect: %@, faceRegion: %@", CGRectContainsRect(self.faceDetectionFrame, faceRegion), NSStringFromCGRect(self.faceDetectionFrame), NSStringFromCGRect(faceRegion));
            if (CGRectContainsRect(self.faceDetectionFrame, faceRegion)) { // capture a frame only when the face region really lies inside the portrait box
                // Setting the videoDataOutput delegate makes the sample-buffer delegate method below start receiving frames
                if (!self.videoDataOutput.sampleBufferDelegate) {
                    [self.videoDataOutput setSampleBufferDelegate:self queue:self.queue];
                }
            }
        }
    }
}
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
#pragma mark Capture single frames from the output data stream
// AVCaptureVideoDataOutput delivers live frames; this delegate method is called very frequently, close to the screen refresh rate.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if ([self.outPutSetting isEqualToNumber:@(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)] || [self.outPutSetting isEqualToNumber:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)]) {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        if ([captureOutput isEqual:self.videoDataOutput]) {
            // Recognize the ID-card information in this frame
            [self IDCardRecognit:imageBuffer];
            // Once recognition has run, remove the videoDataOutput delegate to stop this delegate method from being called repeatedly
            if (_cardType == 0) {
                if (self.videoDataOutput.sampleBufferDelegate) {
                    [self.videoDataOutput setSampleBufferDelegate:nil queue:self.queue];
                }
            }
        }
    } else {
        NSLog(@"输出格式不支持");
    }
}
#pragma mark - ID-card information recognition
- (void)IDCardRecognit:(CVImageBufferRef)imageBuffer {
    CVBufferRetain(imageBuffer);
    // Lock the image buffer
    if (CVPixelBufferLockBaseAddress(imageBuffer, 0) == kCVReturnSuccess) {
        size_t width = CVPixelBufferGetWidth(imageBuffer);   // e.g. 1920
        size_t height = CVPixelBufferGetHeight(imageBuffer); // e.g. 1080
        // For the bi-planar YpCbCr formats the base address points to a CVPlanarPixelBufferInfo_YCbCrBiPlanar
        // header whose fields are stored big-endian; use it to locate the Y (luma) plane.
        CVPlanarPixelBufferInfo_YCbCrBiPlanar *planar = CVPixelBufferGetBaseAddress(imageBuffer);
        size_t offset = NSSwapBigIntToHost(planar->componentInfoY.offset);
        size_t rowBytes = NSSwapBigIntToHost(planar->componentInfoY.rowBytes);
        unsigned char *baseAddress = (unsigned char *)CVPixelBufferGetBaseAddress(imageBuffer);
        unsigned char *pixelAddress = baseAddress + offset;
        static unsigned char *buffer = NULL;
        if (buffer == NULL) {
            buffer = (unsigned char *)malloc(sizeof(unsigned char) * width * height);
        }
        memcpy(buffer, pixelAddress, sizeof(unsigned char) * width * height);
        unsigned char pResult[1024];
        // Run the recognizer on the Y-plane; a return value > 0 is the length of the data written to pResult
        int ret = EXCARDS_RecoIDCardData(buffer, (int)width, (int)height, (int)rowBytes, (int)8, (char *)pResult, sizeof(pResult));
        if (ret <= 0) {
            NSLog(@"ret=[%d]", ret);
        } else {
            NSLog(@"ret=[%d]", ret);
            // Play the shutter sound to simulate taking a photo
            AudioServicesPlaySystemSound(1108);
            if ([self.session isRunning]) {
                [self.session stopRunning];
            }
            // pResult is a sequence of fields: one type byte followed by the field content, terminated by a space
            char ctype;
            char content[256];
            int xlen;
            int i = 0;
            IDInfo *iDInfo = [[IDInfo alloc] init];
            iDInfo.type = self.cardType + 1;
            ctype = pResult[i++];
//            iDInfo.type = ctype;
            while (i < ret) {
                ctype = pResult[i++];
                for (xlen = 0; i < ret; ++i) {
                    if (pResult[i] == ' ') { ++i; break; }
                    content[xlen++] = pResult[i];
                }
                content[xlen] = 0;
                if (xlen) {
                    NSStringEncoding gbkEncoding = CFStringConvertEncodingToNSStringEncoding(kCFStringEncodingGB_18030_2000);
                    if (ctype == 0x21) {        // ID number
                        iDInfo.num = [NSString stringWithCString:(char *)content encoding:gbkEncoding];
                    } else if (ctype == 0x22) { // name
                        iDInfo.name = [NSString stringWithCString:(char *)content encoding:gbkEncoding];
                    } else if (ctype == 0x23) { // gender
                        iDInfo.gender = [NSString stringWithCString:(char *)content encoding:gbkEncoding];
                    } else if (ctype == 0x24) { // ethnicity
                        iDInfo.nation = [NSString stringWithCString:(char *)content encoding:gbkEncoding];
                    } else if (ctype == 0x25) { // address
                        iDInfo.address = [NSString stringWithCString:(char *)content encoding:gbkEncoding];
                    } else if (ctype == 0x26) { // issuing authority
                        iDInfo.issue = [NSString stringWithCString:(char *)content encoding:gbkEncoding];
                    } else if (ctype == 0x27) { // validity period
                        iDInfo.valid = [NSString stringWithCString:(char *)content encoding:gbkEncoding];
                    }
                }
            }
            // Reformat the end date of the validity period, e.g. @"20100701-20300701" -> @"2030-07-01";
            // guard against values that do not contain a '-' separated end date (e.g. long-term validity)
            NSArray *saleArray = [iDInfo.valid componentsSeparatedByString:@"-"];
            if (saleArray.count > 1 && [saleArray[1] length] >= 8) {
                iDInfo.valid = [NSString stringWithFormat:@"%@-%@-%@", [saleArray[1] substringToIndex:4], [saleArray[1] substringWithRange:NSMakeRange(4, 2)], [saleArray[1] substringFromIndex:6]];
            }
            if (iDInfo) { // the card information was read, so crop the card's effective region out of the frame
                NSLog(@"\n正面\n姓名:%@\n性别:%@\n民族:%@\n住址:%@\n公民身份证号码:%@\n\n反面\n签发机关:%@\n有效期限:%@", iDInfo.name, iDInfo.gender, iDInfo.nation, iDInfo.address, iDInfo.num, iDInfo.issue, iDInfo.valid);
                CGRect effectRect = [RectManager getEffectImageRect:CGSizeMake(width, height)];
                CGRect rect = [RectManager getGuideFrame:effectRect];
                UIImage *image = [UIImage getImageStream:imageBuffer];
                UIImage *subImage = [UIImage getSubImage:rect inImage:image];
                dispatch_async(dispatch_get_main_queue(), ^{
                    [self.delegate AVCaptureViewReData:iDInfo img:subImage];
                    //[self.navigationController popViewControllerAnimated:YES];
                    [self dismissViewControllerAnimated:YES completion:nil];
                });
                // Push IDInfoVC (a controller that displays the ID information)
//                IDInfoViewController *IDInfoVC = [[IDInfoViewController alloc] init];
//
//                IDInfoVC.IDInfo = iDInfo;    // ID-card information
//                IDInfoVC.IDImage = subImage; // ID-card image
//
//                dispatch_async(dispatch_get_main_queue(), ^{
//                    [self.navigationController pushViewController:IDInfoVC animated:YES];
//                });
            }
        }
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    }
    CVBufferRelease(imageBuffer);
}
/*
- (UIImage *)imageWithImageSimple:(NSData *)data scaledToSize:(CGSize)newSize {
    UIImage *image = [UIImage imageWithData:data];
    // Create a graphics image context
    UIGraphicsBeginImageContext(newSize);
    // Tell the old image to draw in this new context, with the desired
    // new size
    [image drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];
    // Get the new image from the context
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    // End the context
    UIGraphicsEndImageContext();
    // Return the new image.
    return newImage;
}
- (UIImage *)clipImageWithImage:(UIImage *)image InRect:(CGRect)rect {
    CGImageRef imageRef = CGImageCreateWithImageInRect([image CGImage], rect);
    UIImage *thumbScale = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    return thumbScale;
}
- (void)addConnection {
    AVCaptureConnection *videoConnection;
    for (AVCaptureConnection *connection in [self.videoDataOutput connections]) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
            }
        }
    }
    if ([videoConnection isVideoStabilizationSupported]) {
        if ([[[UIDevice currentDevice] systemVersion] floatValue] < 8.0) {
            videoConnection.enablesVideoStabilizationWhenAvailable = YES;
        } else {
            videoConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
        }
    }
}
- (void)configureDevice:(AVCaptureDevice *)device {
    // Use smooth focus
    if (YES == [device lockForConfiguration:NULL]) {
        if ([device respondsToSelector:@selector(setSmoothAutoFocusEnabled:)] && [device isSmoothAutoFocusSupported]) {
            [device setSmoothAutoFocusEnabled:YES];
        }
        AVCaptureFocusMode currentMode = [device focusMode];
        if (currentMode == AVCaptureFocusModeLocked) {
            currentMode = AVCaptureFocusModeAutoFocus;
        }
        if ([device isFocusModeSupported:currentMode]) {
            [device setFocusMode:currentMode];
        }
        [device unlockForConfiguration];
    }
}
*/
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}
#endif
@end
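
/*
 A minimal usage sketch from the presenting side. This is an illustration only: the host controller is
 hypothetical, and the delegate protocol, the `delegate` property and the `cardType` property are assumed
 to be declared in AVCaptureViewController.h (only their use is visible in this file). Because the scanner
 configures its navigationItem and navigation bar, it is presumably presented inside a UINavigationController.

     // In a hypothetical host controller that adopts the scanner's delegate protocol:
     AVCaptureViewController *scanVC = [[AVCaptureViewController alloc] init];
     scanVC.cardType = 0;    // 0 enables the face-detection path used above
     scanVC.delegate = self;
     UINavigationController *nav = [[UINavigationController alloc] initWithRootViewController:scanVC];
     [self presentViewController:nav animated:YES completion:nil];

     // Delegate callback: receives the parsed IDInfo and the cropped card image on the main queue
     - (void)AVCaptureViewReData:(IDInfo *)info img:(UIImage *)image {
         NSLog(@"name=%@, num=%@", info.name, info.num);
     }
*/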