// RAQRCodeScannerViewController.m
  1. //
  2. // RAQRCodeScannerViewController.m
  3. // Apex And Drivers
  4. //
  5. // Created by Jack on 2018/6/5.
  6. // Copyright © 2018年 USAI. All rights reserved.
  7. //
  8. #import "RAQRCodeScannerViewController.h"
  9. #import <AVKit/AVKit.h>
  10. @interface RAQRCodeScannerViewController () <AVCaptureMetadataOutputObjectsDelegate>
  11. @property (strong, nonatomic) IBOutlet UIView *scanerView;
  12. @property (nonatomic,strong) IBOutlet UIView *previewContainer;
  13. @property (strong, nonatomic) IBOutlet UIView *maskView;
  14. @property (strong, nonatomic) IBOutlet UIButton *scanBtn;
  15. @property (nonatomic,strong) AVCaptureDevice *device;
  16. @property (nonatomic,strong) AVCaptureDeviceInput *input;
  17. @property (nonatomic,strong) AVCaptureMetadataOutput *output;
  18. @property (nonatomic,strong) AVCaptureSession *session;
  19. @property (nonatomic,strong) AVCaptureVideoPreviewLayer *previewLayer;
  20. @property (nonatomic,assign) BOOL scannerEnable;
  21. @property (nonatomic,assign) BOOL scannerInitial;
  22. @property (nonatomic,strong) CAGradientLayer *scanLineLayer;
  23. @property (nonatomic,strong) CAShapeLayer *maskLayer;
  24. @property (nonatomic,strong) CAShapeLayer *rectLayer;
  25. @end
  26. @implementation RAQRCodeScannerViewController
  27. + (NSString *)storyboardID {
  28. return NSStringFromClass([self class]);
  29. }
  30. + (instancetype)viewControllerFromStoryboard {
  31. RAQRCodeScannerViewController *scannerVC = [[UIStoryboard storyboardWithName:@"QRCode" bundle:nil] instantiateViewControllerWithIdentifier:[self storyboardID]];
  32. return scannerVC;
  33. }
  34. - (void)viewDidLoad {
  35. [super viewDidLoad];
  36. // Do any additional setup after loading the view.
  37. self.scanerView.layer.borderColor = [UIColor blackColor].CGColor;
  38. self.scanerView.layer.borderWidth = 0.5f;
  39. UIImage *normal_img = [self.class imageWithColor:[UIColor redColor] Size:CGSizeMake(60, 60)];
  40. UIImage *highlight_img = [self.class imageWithColor:[UIColor greenColor] Size:CGSizeMake(60, 60)];
  41. [self.scanBtn setImage:normal_img forState:UIControlStateNormal];
  42. [self.scanBtn setImage:highlight_img forState:UIControlStateHighlighted];
  43. [self initCapture];
  44. }
  45. //- (UIInterfaceOrientation)preferredInterfaceOrientationForPresentation {
  46. // return UIInterfaceOrientationPortrait;
  47. //}
  48. - (AVCaptureVideoOrientation)captureVideoOrientation {
  49. AVCaptureVideoOrientation result;
  50. UIDeviceOrientation deviceOrientation = [UIDevice currentDevice].orientation;
  51. switch (deviceOrientation) {
  52. case UIDeviceOrientationPortrait:
  53. case UIDeviceOrientationFaceUp:
  54. case UIDeviceOrientationFaceDown:
  55. result = AVCaptureVideoOrientationPortrait;
  56. break;
  57. case UIDeviceOrientationPortraitUpsideDown:
  58. //如果这里设置成AVCaptureVideoOrientationPortraitUpsideDown,则视频方向和拍摄时的方向是相反的。
  59. result = AVCaptureVideoOrientationPortrait;
  60. break;
  61. case UIDeviceOrientationLandscapeLeft:
  62. result = AVCaptureVideoOrientationLandscapeRight;
  63. break;
  64. case UIDeviceOrientationLandscapeRight:
  65. result = AVCaptureVideoOrientationLandscapeLeft;
  66. break;
  67. default:
  68. result = AVCaptureVideoOrientationPortrait;
  69. break;
  70. }
  71. return result;
  72. }
// Runs on every layout pass (including after rotation): resizes the preview
// layer, recomputes the metadata output's rectOfInterest from the scan window,
// and rebuilds the scan-line / dimming-mask / frame decoration layers.
// NOTE(review): layers are (re)inserted on each pass; CALayer re-parenting
// makes this idempotent, but it is redundant work — consider guarding.
- (void)viewDidLayoutSubviews {
    [super viewDidLayoutSubviews];
    // Keep the preview matched to its container and the current rotation.
    self.previewLayer.frame = self.previewContainer.bounds;
    AVCaptureVideoOrientation orientation = [self captureVideoOrientation];
    if (self.previewLayer.connection.isVideoOrientationSupported) {
        self.previewLayer.connection.videoOrientation = orientation;
    }
    CGFloat w = CGRectGetWidth(self.previewContainer.bounds);
    CGFloat h = CGRectGetHeight(self.previewContainer.bounds);
    /**
     rectOfInterest is expressed in the metadata output's normalized (0..1)
     coordinate space. In portrait the x and y axes must be swapped.
     In landscape the origin is on the side opposite the Home button, so
     LandscapeLeft measures from the far corner (the 1 - max terms below).
     */
    CGRect rect = CGRectMake(CGRectGetMinY(self.scanerView.frame) / h, CGRectGetMinX(self.scanerView.frame) / w, CGRectGetHeight(self.scanerView.frame) / h, CGRectGetWidth(self.scanerView.frame) / w);
    if (orientation == AVCaptureVideoOrientationLandscapeRight) {
        rect = CGRectMake(CGRectGetMinX(self.scanerView.frame) / w, CGRectGetMinY(self.scanerView.frame) / h, CGRectGetWidth(self.scanerView.frame) / w,CGRectGetHeight(self.scanerView.frame) / h);
    } else if (orientation == AVCaptureVideoOrientationLandscapeLeft){
        // Mirrored: origin measured from the opposite corner of the preview.
        rect = CGRectMake(1 - CGRectGetMaxX(self.scanerView.frame) / w, 1 - CGRectGetMaxY(self.scanerView.frame) / h, CGRectGetWidth(self.scanerView.frame) / w,CGRectGetHeight(self.scanerView.frame) / h);
    } else {
        // Portrait (and fallback): axes swapped, as noted above.
        rect = CGRectMake(CGRectGetMinY(self.scanerView.frame) / h, CGRectGetMinX(self.scanerView.frame) / w, CGRectGetHeight(self.scanerView.frame) / h, CGRectGetWidth(self.scanerView.frame) / w);
    }
    [self.output setRectOfInterest:rect];
    // Assumes scanerView is a direct subview of self.view — TODO confirm in storyboard.
    [self.view bringSubviewToFront:self.scanerView];
    // 1 pt horizontal scan line centered vertically in the scan window.
    CGRect scanlineFrame = CGRectMake(0, CGRectGetMidY(self.scanerView.bounds) - 0.5, CGRectGetWidth(self.scanerView.bounds), 1);
    self.scanLineLayer.frame = scanlineFrame;
    [self.scanerView.layer insertSublayer:self.scanLineLayer atIndex:0];
    // Even-odd fill: the outer rect minus the scan-window rect dims everything
    // except the scan window. (scanerView.frame must be in maskView's space.)
    UIBezierPath *path = [UIBezierPath bezierPathWithRect:self.maskView.bounds];
    UIBezierPath *subPath = [UIBezierPath bezierPathWithRect:self.scanerView.frame];
    [path appendPath:subPath];
    if (!self.maskLayer) {
        self.maskLayer = [CAShapeLayer layer];
        self.maskLayer.fillColor = [UIColor colorWithRed:0.2 green:0.2 blue:0.2 alpha:0.5].CGColor;
        self.maskLayer.fillRule = kCAFillRuleEvenOdd;
    }
    self.maskLayer.path = path.CGPath;
    // Thin white outline drawn on the scan-window edge.
    if (!self.rectLayer) {
        self.rectLayer = [CAShapeLayer layer];
        self.rectLayer.fillColor = [UIColor clearColor].CGColor;
        self.rectLayer.strokeColor = [UIColor whiteColor].CGColor;
        self.rectLayer.lineWidth = 0.5f;
    }
    self.rectLayer.path = subPath.CGPath;
    [self.maskLayer addSublayer:self.rectLayer];
    [self.maskView.layer addSublayer:self.maskLayer];
}
  119. - (void)viewWillTransitionToSize:(CGSize)size withTransitionCoordinator:(id<UIViewControllerTransitionCoordinator>)coordinator {
  120. [super viewWillTransitionToSize:size withTransitionCoordinator:coordinator];
  121. // NSLog(@"device orientation: %ld & statusbar orientaion: %ld",[UIDevice currentDevice].orientation,[UIApplication sharedApplication].statusBarOrientation);
  122. }
  123. - (void)viewWillAppear:(BOOL)animated {
  124. [super viewWillAppear:animated];
  125. if (self.scannerInitial) {
  126. [self.session startRunning];
  127. }
  128. }
  129. - (void)viewWillDisappear:(BOOL)animated {
  130. [super viewWillDisappear:animated];
  131. if ([self.session isRunning]) {
  132. [self.session stopRunning];
  133. }
  134. }
  135. - (void)didReceiveMemoryWarning {
  136. [super didReceiveMemoryWarning];
  137. // Dispose of any resources that can be recreated.
  138. }
  139. - (CAGradientLayer *)scanLineLayer {
  140. if (!_scanLineLayer) {
  141. CAGradientLayer *gradientLayer = [CAGradientLayer layer];
  142. //set gradient colors
  143. // 数组成员接受 CGColorRef 类型的值
  144. gradientLayer.colors = @[(__bridge id)[UIColor colorWithRed:1 green:0 blue:0 alpha:0.2].CGColor,(__bridge id)[UIColor redColor].CGColor,(__bridge id)[UIColor colorWithRed:1 green:0 blue:0 alpha:0.2].CGColor];
  145. gradientLayer.locations = @[@2.5,@0.5,@0.75];
  146. gradientLayer.startPoint = CGPointMake(0, 0);
  147. gradientLayer.endPoint = CGPointMake(1, 0);
  148. _scanLineLayer = gradientLayer;
  149. }
  150. return _scanLineLayer;
  151. }
  152. #pragma mark - Init
  153. - (void)initCapture {
  154. self.scannerInitial = NO;
  155. if (![self camerAuthorization]) {
  156. NSDictionary* infoDict =[[NSBundle mainBundle] infoDictionary];
  157. NSString *appName = [infoDict objectForKey:@"CFBundleName"];
  158. __weak typeof(self) weakSelf = self;
  159. UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"Warning" message:[NSString stringWithFormat:@"Camera access denied, please change %@ setting, allow App use camera. (setting -> privacy -> camera enable %@)",[UIDevice currentDevice].model,appName] preferredStyle:UIAlertControllerStyleAlert];
  160. UIAlertAction *action = [UIAlertAction actionWithTitle:@"Ok" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
  161. [weakSelf dismissViewControllerAnimated:YES completion:nil];
  162. }];
  163. [alert addAction:action];
  164. [self presentViewController:alert animated:YES completion:nil];
  165. return;
  166. }
  167. self.device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  168. NSError *inputError;
  169. self.input = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:&inputError];
  170. if (inputError) {
  171. NSLog(@"init scanner input error: %@",inputError);
  172. return;
  173. }
  174. self.session = [[AVCaptureSession alloc] init];
  175. [self.session setSessionPreset:AVCaptureSessionPresetHigh];
  176. if ([self.session canAddInput:self.input]) {
  177. [self.session addInput:self.input];
  178. } else {
  179. NSLog(@"init scanner can't add input");
  180. return;
  181. }
  182. self.output = [[AVCaptureMetadataOutput alloc] init];
  183. [self.output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
  184. if ([self.session canAddOutput:self.output]) {
  185. [self.session addOutput:self.output];
  186. } else {
  187. NSLog(@"init scanner can't add output");
  188. return;
  189. }
  190. self.output.metadataObjectTypes = @[
  191. AVMetadataObjectTypeQRCode,
  192. AVMetadataObjectTypeEAN13Code,
  193. AVMetadataObjectTypeEAN8Code,
  194. AVMetadataObjectTypeUPCECode,
  195. AVMetadataObjectTypeCode39Code,
  196. AVMetadataObjectTypeCode39Mod43Code,
  197. AVMetadataObjectTypeCode93Code,
  198. AVMetadataObjectTypeCode128Code,
  199. AVMetadataObjectTypePDF417Code
  200. ];
  201. self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
  202. self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
  203. [self.previewContainer.layer addSublayer:self.previewLayer];
  204. self.scannerInitial = YES;
  205. }
  206. - (BOOL)camerAuthorization {
  207. AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
  208. if (status == AVAuthorizationStatusRestricted || status == AVAuthorizationStatusDenied) {
  209. return NO;
  210. }
  211. return YES;
  212. }
  213. #pragma mark - AVCaptureMetadataOutputObjectsDelegate
  214. - (void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
  215. if (!self.scannerEnable) {
  216. return;
  217. }
  218. if ([metadataObjects count] > 0) {
  219. [self.session stopRunning];
  220. AVMetadataMachineReadableCodeObject *metadataObject = [metadataObjects objectAtIndex:0];
  221. NSString *codeValue = metadataObject.stringValue;
  222. if (self.completion) {
  223. self.completion(codeValue);
  224. }
  225. [self.navigationController popViewControllerAnimated:YES];
  226. }
  227. }
  228. #pragma mark - Action
  229. - (IBAction)scannerBtnTouchDown:(UIButton *)sender {
  230. self.scannerEnable = YES;
  231. }
  232. - (IBAction)scannerBtnTouchUp:(UIButton *)sender {
  233. self.scannerEnable = NO;
  234. }
  235. - (IBAction)scannerBtnTouchUpOutSide:(UIButton *)sender {
  236. self.scannerEnable = NO;
  237. }
  238. - (IBAction)scannerBtnTouchCancel:(UIButton *)sender {
  239. self.scannerEnable = NO;
  240. }
  241. #pragma mark - Utils
  242. + (UIImage *)imageWithColor:(UIColor *)color Size:(CGSize)size {
  243. UIGraphicsBeginImageContextWithOptions(size, NO, [UIScreen mainScreen].scale);
  244. CGContextRef ctx = UIGraphicsGetCurrentContext();
  245. CGContextAddEllipseInRect(ctx, CGRectMake(5, 5, size.width - 10, size.height - 10));
  246. CGContextSetFillColorWithColor(ctx, color.CGColor);
  247. CGContextSetStrokeColorWithColor(ctx, [UIColor whiteColor].CGColor);
  248. CGContextSetLineWidth(ctx, 3.0f);
  249. CGContextDrawPath(ctx, kCGPathFillStroke);
  250. UIImage *img = UIGraphicsGetImageFromCurrentImageContext();
  251. UIGraphicsEndImageContext();
  252. return img;
  253. }
  254. @end