//
//  RAQRCodeScannerViewController.m
//  Apex And Drivers
//
//  Created by Jack on 2018/6/5.
//  Copyright © 2018年 USAI. All rights reserved.
//
  8. #import "RAQRCodeScannerViewController.h"
  9. #import <AVKit/AVKit.h>
  10. @interface RAQRCodeScannerViewController () <AVCaptureMetadataOutputObjectsDelegate>
  11. @property (strong, nonatomic) IBOutlet UIView *scanerView;
  12. @property (nonatomic,strong) IBOutlet UIView *previewContainer;
  13. @property (strong, nonatomic) IBOutlet UIView *maskView;
  14. @property (strong, nonatomic) IBOutlet UIButton *scanBtn;
  15. @property (strong, nonatomic) IBOutlet UIButton *backBtn;
  16. @property (nonatomic,strong) AVCaptureDevice *device;
  17. @property (nonatomic,strong) AVCaptureDeviceInput *input;
  18. @property (nonatomic,strong) AVCaptureMetadataOutput *output;
  19. @property (nonatomic,strong) AVCaptureSession *session;
  20. @property (nonatomic,strong) AVCaptureVideoPreviewLayer *previewLayer;
  21. @property (nonatomic,assign) BOOL scannerEnable;
  22. @property (nonatomic,assign) BOOL scannerInitial;
  23. @property (nonatomic,strong) CAGradientLayer *scanLineLayer;
  24. @property (nonatomic,strong) CAShapeLayer *maskLayer;
  25. @property (nonatomic,strong) CAShapeLayer *rectLayer;
  26. @property (nonatomic,assign) BOOL navigationBarStatus;
  27. @property (weak, nonatomic) IBOutlet UITextView *tvdbg_log;
  28. @end
  29. @implementation RAQRCodeScannerViewController
  30. + (NSString *)storyboardID {
  31. return NSStringFromClass([self class]);
  32. }
  33. + (instancetype)viewControllerFromStoryboard {
  34. RAQRCodeScannerViewController *scannerVC = [[UIStoryboard storyboardWithName:@"QRCode" bundle:nil] instantiateViewControllerWithIdentifier:[self storyboardID]];
  35. return scannerVC;
  36. }
  37. - (void)viewDidLoad {
  38. [super viewDidLoad];
  39. self.tvdbg_log.hidden = true;
  40. // Do any additional setup after loading the view.
  41. if (self.navigationController) {
  42. self.navigationBarStatus = self.navigationController.navigationBarHidden;
  43. [self.navigationController setNavigationBarHidden:YES animated:NO];
  44. }
  45. self.scanerView.layer.borderColor = [UIColor blackColor].CGColor;
  46. self.scanerView.layer.borderWidth = 0.5f;
  47. UIImage *normal_img = [self.class imageWithColor:[UIColor redColor] Size:CGSizeMake(60, 60)];
  48. UIImage *highlight_img = [self.class imageWithColor:[UIColor greenColor] Size:CGSizeMake(60, 60)];
  49. [self.scanBtn setImage:normal_img forState:UIControlStateNormal];
  50. [self.scanBtn setImage:highlight_img forState:UIControlStateHighlighted];
  51. if ([self camerAuthorization]) {
  52. [self initCapture];
  53. } else {
  54. [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
  55. dispatch_async(dispatch_get_main_queue(), ^{
  56. if (granted) {
  57. [self initCapture];
  58. [self resetPreview];
  59. if (![self.session isRunning]) {
  60. [self.session startRunning];
  61. }
  62. } else {
  63. NSDictionary* infoDict =[[NSBundle mainBundle] infoDictionary];
  64. NSString *appName = [infoDict objectForKey:@"CFBundleDisplayName"];
  65. if (!appName) {
  66. appName = [infoDict objectForKey:@"CFBundleName"];
  67. }
  68. __weak typeof(self) weakSelf = self;
  69. UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"Warning" message:[NSString stringWithFormat:@"Camera access denied, please change %@ setting, allow App use camera. (setting -> privacy -> camera enable %@)",[UIDevice currentDevice].model,appName] preferredStyle:UIAlertControllerStyleAlert];
  70. UIAlertAction *action = [UIAlertAction actionWithTitle:@"Ok" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
  71. [weakSelf didCancel];
  72. }];
  73. [alert addAction:action];
  74. [self presentViewController:alert animated:YES completion:nil];
  75. }
  76. });
  77. }];
  78. }
  79. }
//- (UIInterfaceOrientation)preferredInterfaceOrientationForPresentation {
//    return UIInterfaceOrientationPortrait;
//}
  83. - (AVCaptureVideoOrientation)captureVideoOrientation {
  84. AVCaptureVideoOrientation result;
  85. UIDeviceOrientation deviceOrientation = [UIDevice currentDevice].orientation;
  86. switch (deviceOrientation) {
  87. case UIDeviceOrientationPortrait:
  88. case UIDeviceOrientationFaceUp:
  89. case UIDeviceOrientationFaceDown:
  90. result = AVCaptureVideoOrientationPortrait;
  91. break;
  92. case UIDeviceOrientationPortraitUpsideDown:
  93. //如果这里设置成AVCaptureVideoOrientationPortraitUpsideDown,则视频方向和拍摄时的方向是相反的。
  94. result = AVCaptureVideoOrientationPortrait;
  95. break;
  96. case UIDeviceOrientationLandscapeLeft:
  97. result = AVCaptureVideoOrientationLandscapeRight;
  98. break;
  99. case UIDeviceOrientationLandscapeRight:
  100. result = AVCaptureVideoOrientationLandscapeLeft;
  101. break;
  102. default:
  103. result = AVCaptureVideoOrientationPortrait;
  104. break;
  105. }
  106. return result;
  107. }
  108. - (void)viewDidLayoutSubviews {
  109. [super viewDidLayoutSubviews];
  110. [self resetPreview];
  111. }
  112. - (void)viewWillTransitionToSize:(CGSize)size withTransitionCoordinator:(id<UIViewControllerTransitionCoordinator>)coordinator {
  113. [super viewWillTransitionToSize:size withTransitionCoordinator:coordinator];
  114. // NSLog(@"device orientation: %ld & statusbar orientaion: %ld",[UIDevice currentDevice].orientation,[UIApplication sharedApplication].statusBarOrientation);
  115. }
  116. - (void)viewWillAppear:(BOOL)animated {
  117. [super viewWillAppear:animated];
  118. if (self.scannerInitial) {
  119. // [self.session startRunning];
  120. dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
  121. [self.session startRunning];
  122. });
  123. }
  124. }
  125. - (void)viewWillDisappear:(BOOL)animated {
  126. [super viewWillDisappear:animated];
  127. if ([self.session isRunning]) {
  128. [self.session stopRunning];
  129. }
  130. }
  131. - (void)didReceiveMemoryWarning {
  132. [super didReceiveMemoryWarning];
  133. // Dispose of any resources that can be recreated.
  134. }
  135. - (void)resetPreview {
  136. self.previewLayer.frame = self.previewContainer.bounds;
  137. AVCaptureVideoOrientation orientation = [self captureVideoOrientation];
  138. if (self.previewLayer.connection.isVideoOrientationSupported) {
  139. self.previewLayer.connection.videoOrientation = orientation;
  140. }
  141. CGFloat w = CGRectGetWidth(self.previewContainer.bounds);
  142. CGFloat h = CGRectGetHeight(self.previewContainer.bounds);
  143. /**
  144. rectOfInterest
  145. 竖屏 x轴和y轴要交换一下
  146. Left、Right:Home键反方向X为0
  147. */
  148. CGRect rect = CGRectMake(CGRectGetMinY(self.scanerView.frame) / h, CGRectGetMinX(self.scanerView.frame) / w, CGRectGetHeight(self.scanerView.frame) / h, CGRectGetWidth(self.scanerView.frame) / w);
  149. if (orientation == AVCaptureVideoOrientationLandscapeRight) {
  150. rect = CGRectMake(CGRectGetMinX(self.scanerView.frame) / w, CGRectGetMinY(self.scanerView.frame) / h, CGRectGetWidth(self.scanerView.frame) / w,CGRectGetHeight(self.scanerView.frame) / h);
  151. } else if (orientation == AVCaptureVideoOrientationLandscapeLeft){
  152. rect = CGRectMake(1 - CGRectGetMaxX(self.scanerView.frame) / w, 1 - CGRectGetMaxY(self.scanerView.frame) / h, CGRectGetWidth(self.scanerView.frame) / w,CGRectGetHeight(self.scanerView.frame) / h);
  153. } else {
  154. rect = CGRectMake(CGRectGetMinY(self.scanerView.frame) / h, CGRectGetMinX(self.scanerView.frame) / w, CGRectGetHeight(self.scanerView.frame) / h, CGRectGetWidth(self.scanerView.frame) / w);
  155. }
  156. [self.output setRectOfInterest:rect];
  157. [self.view bringSubviewToFront:self.scanerView];
  158. CGRect scanlineFrame = CGRectMake(0, CGRectGetMidY(self.scanerView.bounds) - 0.5, CGRectGetWidth(self.scanerView.bounds), 1);
  159. self.scanLineLayer.frame = scanlineFrame;
  160. [self.scanerView.layer insertSublayer:self.scanLineLayer atIndex:0];
  161. UIBezierPath *path = [UIBezierPath bezierPathWithRect:self.maskView.bounds];
  162. UIBezierPath *subPath = [UIBezierPath bezierPathWithRect:self.scanerView.frame];
  163. [path appendPath:subPath];
  164. if (!self.maskLayer) {
  165. self.maskLayer = [CAShapeLayer layer];
  166. self.maskLayer.fillColor = [UIColor colorWithRed:0.2 green:0.2 blue:0.2 alpha:0.5].CGColor;
  167. self.maskLayer.fillRule = kCAFillRuleEvenOdd;
  168. }
  169. self.maskLayer.path = path.CGPath;
  170. if (!self.rectLayer) {
  171. self.rectLayer = [CAShapeLayer layer];
  172. self.rectLayer.fillColor = [UIColor clearColor].CGColor;
  173. self.rectLayer.strokeColor = [UIColor whiteColor].CGColor;
  174. self.rectLayer.lineWidth = 0.5f;
  175. }
  176. self.rectLayer.path = subPath.CGPath;
  177. [self.maskLayer addSublayer:self.rectLayer];
  178. [self.maskView.layer addSublayer:self.maskLayer];
  179. }
  180. - (CAGradientLayer *)scanLineLayer {
  181. if (!_scanLineLayer) {
  182. CAGradientLayer *gradientLayer = [CAGradientLayer layer];
  183. //set gradient colors
  184. // 数组成员接受 CGColorRef 类型的值
  185. gradientLayer.colors = @[(__bridge id)[UIColor colorWithRed:1 green:0 blue:0 alpha:0.2].CGColor,(__bridge id)[UIColor redColor].CGColor,(__bridge id)[UIColor colorWithRed:1 green:0 blue:0 alpha:0.2].CGColor];
  186. gradientLayer.locations = @[@2.5,@0.5,@0.75];
  187. gradientLayer.startPoint = CGPointMake(0, 0);
  188. gradientLayer.endPoint = CGPointMake(1, 0);
  189. _scanLineLayer = gradientLayer;
  190. }
  191. return _scanLineLayer;
  192. }
  193. #pragma mark - Private
  194. - (void)didCancel {
  195. if (self.QRCodeViewControllerDidCanceled) {
  196. self.QRCodeViewControllerDidCanceled(self);
  197. if (self.navigationController) {
  198. [self.navigationController setNavigationBarHidden:self.navigationBarStatus animated:NO];
  199. }
  200. }
  201. }
  202. #pragma mark - Init
  203. - (void)initCapture {
  204. self.scannerInitial = NO;
  205. if (![self camerAuthorization]) {
  206. NSDictionary* infoDict =[[NSBundle mainBundle] infoDictionary];
  207. NSString *appName = [infoDict objectForKey:@"CFBundleDisplayName"];
  208. if (!appName) {
  209. appName = [infoDict objectForKey:@"CFBundleName"];
  210. }
  211. __weak typeof(self) weakSelf = self;
  212. UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"Warning" message:[NSString stringWithFormat:@"Camera access denied, please change %@ setting, allow App use camera. (setting -> privacy -> camera enable %@)",[UIDevice currentDevice].model,appName] preferredStyle:UIAlertControllerStyleAlert];
  213. UIAlertAction *action = [UIAlertAction actionWithTitle:@"Ok" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
  214. [weakSelf didCancel];
  215. }];
  216. [alert addAction:action];
  217. [self presentViewController:alert animated:YES completion:nil];
  218. return;
  219. }
  220. self.device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  221. NSError *inputError;
  222. self.input = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:&inputError];
  223. if (inputError) {
  224. NSLog(@"init scanner input error: %@",inputError);
  225. return;
  226. }
  227. self.session = [[AVCaptureSession alloc] init];
  228. [self.session setSessionPreset:AVCaptureSessionPresetHigh];
  229. if ([self.session canAddInput:self.input]) {
  230. [self.session addInput:self.input];
  231. } else {
  232. NSLog(@"init scanner can't add input");
  233. return;
  234. }
  235. self.output = [[AVCaptureMetadataOutput alloc] init];
  236. [self.output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
  237. if ([self.session canAddOutput:self.output]) {
  238. [self.session addOutput:self.output];
  239. } else {
  240. NSLog(@"init scanner can't add output");
  241. return;
  242. }
  243. NSLog(@"%@", [_output availableMetadataObjectTypes]);
  244. NSString* log = [NSString stringWithFormat:@"%@ \n",[_output availableMetadataObjectTypes]];
  245. self.tvdbg_log.text = [self.tvdbg_log.text stringByAppendingString:log];
  246. if(self.scanType==RAScanTypeAll)
  247. {
  248. self.output.metadataObjectTypes = self.output.availableMetadataObjectTypes;
  249. }
  250. else if(self.scanType == RAScanTypeBarcode)
  251. {
  252. self.output.metadataObjectTypes = @[
  253. // AVMetadataObjectTypeQRCode,
  254. AVMetadataObjectTypeEAN13Code,
  255. AVMetadataObjectTypeEAN8Code,
  256. AVMetadataObjectTypeUPCECode,
  257. AVMetadataObjectTypeCode39Code,
  258. AVMetadataObjectTypeCode39Mod43Code,
  259. AVMetadataObjectTypeCode93Code,
  260. AVMetadataObjectTypeCode128Code
  261. // AVMetadataObjectTypePDF417Code
  262. ];
  263. }
  264. else
  265. {
  266. self.output.metadataObjectTypes = @[
  267. AVMetadataObjectTypeQRCode,
  268. // AVMetadataObjectTypeEAN13Code,
  269. // AVMetadataObjectTypeEAN8Code,
  270. // AVMetadataObjectTypeUPCECode,
  271. // AVMetadataObjectTypeCode39Code,
  272. // AVMetadataObjectTypeCode39Mod43Code,
  273. // AVMetadataObjectTypeCode93Code,
  274. // AVMetadataObjectTypeCode128Code
  275. AVMetadataObjectTypePDF417Code
  276. ];
  277. }
  278. log = [NSString stringWithFormat:@"set : \n %@ \n",self.output.metadataObjectTypes];
  279. self.tvdbg_log.text = [self.tvdbg_log.text stringByAppendingString:log];
  280. self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
  281. self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
  282. [self.previewContainer.layer addSublayer:self.previewLayer];
  283. self.scannerInitial = YES;
  284. }
  285. - (BOOL)camerAuthorization {
  286. AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
  287. if (status == AVAuthorizationStatusRestricted || status == AVAuthorizationStatusDenied) {
  288. return NO;
  289. }
  290. return YES;
  291. }
  292. #pragma mark - AVCaptureMetadataOutputObjectsDelegate
  293. - (void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
  294. if (!self.scannerEnable) {
  295. return;
  296. }
  297. NSString* log = [NSString stringWithFormat:@"%@ \n",@"didOutputMetadataObjects"];
  298. self.tvdbg_log.text = [self.tvdbg_log.text stringByAppendingString:log];
  299. if ([metadataObjects count] > 0) {
  300. [self.session stopRunning];
  301. AVMetadataMachineReadableCodeObject *metadataObject = [metadataObjects objectAtIndex:0];
  302. log = [NSString stringWithFormat:@"type %@ \n",metadataObject.type];
  303. self.tvdbg_log.text = [self.tvdbg_log.text stringByAppendingString:log];
  304. NSString *codeValue = metadataObject.stringValue;
  305. if (self.QRCodeViewControllerDidCompletion) {
  306. self.QRCodeViewControllerDidCompletion(self, codeValue);
  307. if (self.navigationController) {
  308. [self.navigationController setNavigationBarHidden:self.navigationBarStatus animated:NO];
  309. }
  310. }
  311. }
  312. }
  313. #pragma mark - Action
  314. - (IBAction)scannerBtnTouchDown:(UIButton *)sender {
  315. self.scannerEnable = YES;
  316. }
  317. - (IBAction)scannerBtnTouchUp:(UIButton *)sender {
  318. self.scannerEnable = NO;
  319. }
  320. - (IBAction)scannerBtnTouchUpOutSide:(UIButton *)sender {
  321. self.scannerEnable = NO;
  322. }
  323. - (IBAction)scannerBtnTouchCancel:(UIButton *)sender {
  324. self.scannerEnable = NO;
  325. }
  326. - (IBAction)backBtnClick:(UIButton *)sender {
  327. [self didCancel];
  328. }
  329. #pragma mark - Utils
  330. + (UIImage *)imageWithColor:(UIColor *)color Size:(CGSize)size {
  331. UIGraphicsBeginImageContextWithOptions(size, NO, [UIScreen mainScreen].scale);
  332. CGContextRef ctx = UIGraphicsGetCurrentContext();
  333. CGContextAddEllipseInRect(ctx, CGRectMake(5, 5, size.width - 10, size.height - 10));
  334. CGContextSetFillColorWithColor(ctx, color.CGColor);
  335. CGContextSetStrokeColorWithColor(ctx, [UIColor whiteColor].CGColor);
  336. CGContextSetLineWidth(ctx, 3.0f);
  337. CGContextDrawPath(ctx, kCGPathFillStroke);
  338. UIImage *img = UIGraphicsGetImageFromCurrentImageContext();
  339. UIGraphicsEndImageContext();
  340. return img;
  341. }
  342. @end