RAQRCodeScannerViewController.m 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421
  1. //
  2. // RAQRCodeScannerViewController.m
  3. // Apex And Drivers
  4. //
  5. // Created by Jack on 2018/6/5.
  6. // Copyright © 2018年 USAI. All rights reserved.
  7. //
  8. #import "RAQRCodeScannerViewController.h"
  9. #import <AVKit/AVKit.h>
@interface RAQRCodeScannerViewController () <AVCaptureMetadataOutputObjectsDelegate>
// White-bordered view outlining the active scan region.
@property (strong, nonatomic) IBOutlet UIView *scanerView;
// Host view for the AVCaptureVideoPreviewLayer.
@property (nonatomic,strong) IBOutlet UIView *previewContainer;
// Dimming overlay; -resetPreview punches an even-odd hole over scanerView.
@property (strong, nonatomic) IBOutlet UIView *maskView;
// Hold-to-scan button: decoding is enabled only while it is pressed.
@property (strong, nonatomic) IBOutlet UIButton *scanBtn;
@property (strong, nonatomic) IBOutlet UIButton *backBtn;
// AVFoundation capture pipeline, assembled in -initCapture.
@property (nonatomic,strong) AVCaptureDevice *device;
@property (nonatomic,strong) AVCaptureDeviceInput *input;
@property (nonatomic,strong) AVCaptureMetadataOutput *output;
@property (nonatomic,strong) AVCaptureSession *session;
@property (nonatomic,strong) AVCaptureVideoPreviewLayer *previewLayer;
// YES while the scan button is held down; gates the metadata delegate callback.
@property (nonatomic,assign) BOOL scannerEnable;
// YES once -initCapture has fully configured the session.
@property (nonatomic,assign) BOOL scannerInitial;
// Red horizontal gradient "scan line" displayed inside scanerView.
@property (nonatomic,strong) CAGradientLayer *scanLineLayer;
// Even-odd dimming layer and thin white outline around the scan rect.
@property (nonatomic,strong) CAShapeLayer *maskLayer;
@property (nonatomic,strong) CAShapeLayer *rectLayer;
// Navigation bar hidden-state captured in -viewDidLoad, restored on exit.
@property (nonatomic,assign) BOOL navigationBarStatus;
// On-screen debug console used while tuning the scanner.
@property (weak, nonatomic) IBOutlet UITextView *tvdbg_log;
@end
  29. @implementation RAQRCodeScannerViewController
  30. + (NSString *)storyboardID {
  31. return NSStringFromClass([self class]);
  32. }
  33. + (instancetype)viewControllerFromStoryboard {
  34. RAQRCodeScannerViewController *scannerVC = [[UIStoryboard storyboardWithName:@"QRCode" bundle:nil] instantiateViewControllerWithIdentifier:[self storyboardID]];
  35. return scannerVC;
  36. }
  37. - (void)viewDidLoad {
  38. [super viewDidLoad];
  39. // Do any additional setup after loading the view.
  40. if (self.navigationController) {
  41. self.navigationBarStatus = self.navigationController.navigationBarHidden;
  42. [self.navigationController setNavigationBarHidden:YES animated:NO];
  43. }
  44. self.scanerView.layer.borderColor = [UIColor blackColor].CGColor;
  45. self.scanerView.layer.borderWidth = 0.5f;
  46. UIImage *normal_img = [self.class imageWithColor:[UIColor redColor] Size:CGSizeMake(60, 60)];
  47. UIImage *highlight_img = [self.class imageWithColor:[UIColor greenColor] Size:CGSizeMake(60, 60)];
  48. [self.scanBtn setImage:normal_img forState:UIControlStateNormal];
  49. [self.scanBtn setImage:highlight_img forState:UIControlStateHighlighted];
  50. if ([self camerAuthorization]) {
  51. [self initCapture];
  52. } else {
  53. [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
  54. dispatch_async(dispatch_get_main_queue(), ^{
  55. if (granted) {
  56. [self initCapture];
  57. [self resetPreview];
  58. if (![self.session isRunning]) {
  59. [self.session startRunning];
  60. }
  61. } else {
  62. NSDictionary* infoDict =[[NSBundle mainBundle] infoDictionary];
  63. NSString *appName = [infoDict objectForKey:@"CFBundleDisplayName"];
  64. if (!appName) {
  65. appName = [infoDict objectForKey:@"CFBundleName"];
  66. }
  67. __weak typeof(self) weakSelf = self;
  68. UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"Warning" message:[NSString stringWithFormat:@"Camera access denied, please change %@ setting, allow App use camera. (setting -> privacy -> camera enable %@)",[UIDevice currentDevice].model,appName] preferredStyle:UIAlertControllerStyleAlert];
  69. UIAlertAction *action = [UIAlertAction actionWithTitle:@"Ok" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
  70. [weakSelf didCancel];
  71. }];
  72. [alert addAction:action];
  73. [self presentViewController:alert animated:YES completion:nil];
  74. }
  75. });
  76. }];
  77. }
  78. }
  79. //- (UIInterfaceOrientation)preferredInterfaceOrientationForPresentation {
  80. // return UIInterfaceOrientationPortrait;
  81. //}
  82. - (AVCaptureVideoOrientation)captureVideoOrientation {
  83. AVCaptureVideoOrientation result;
  84. UIDeviceOrientation deviceOrientation = [UIDevice currentDevice].orientation;
  85. switch (deviceOrientation) {
  86. case UIDeviceOrientationPortrait:
  87. case UIDeviceOrientationFaceUp:
  88. case UIDeviceOrientationFaceDown:
  89. result = AVCaptureVideoOrientationPortrait;
  90. break;
  91. case UIDeviceOrientationPortraitUpsideDown:
  92. //如果这里设置成AVCaptureVideoOrientationPortraitUpsideDown,则视频方向和拍摄时的方向是相反的。
  93. result = AVCaptureVideoOrientationPortrait;
  94. break;
  95. case UIDeviceOrientationLandscapeLeft:
  96. result = AVCaptureVideoOrientationLandscapeRight;
  97. break;
  98. case UIDeviceOrientationLandscapeRight:
  99. result = AVCaptureVideoOrientationLandscapeLeft;
  100. break;
  101. default:
  102. result = AVCaptureVideoOrientationPortrait;
  103. break;
  104. }
  105. return result;
  106. }
  107. - (void)viewDidLayoutSubviews {
  108. [super viewDidLayoutSubviews];
  109. [self resetPreview];
  110. }
  111. - (void)viewWillTransitionToSize:(CGSize)size withTransitionCoordinator:(id<UIViewControllerTransitionCoordinator>)coordinator {
  112. [super viewWillTransitionToSize:size withTransitionCoordinator:coordinator];
  113. // NSLog(@"device orientation: %ld & statusbar orientaion: %ld",[UIDevice currentDevice].orientation,[UIApplication sharedApplication].statusBarOrientation);
  114. }
  115. - (void)viewWillAppear:(BOOL)animated {
  116. [super viewWillAppear:animated];
  117. if (self.scannerInitial) {
  118. [self.session startRunning];
  119. }
  120. }
  121. - (void)viewWillDisappear:(BOOL)animated {
  122. [super viewWillDisappear:animated];
  123. if ([self.session isRunning]) {
  124. [self.session stopRunning];
  125. }
  126. }
  127. - (void)didReceiveMemoryWarning {
  128. [super didReceiveMemoryWarning];
  129. // Dispose of any resources that can be recreated.
  130. }
// Rebuilds everything that depends on the current layout: the preview layer's
// frame and video orientation, the (currently disabled) metadata
// rect-of-interest, the scan-line layer, and the dimmed mask with a clear
// cut-out over the scan rect. Called from -viewDidLayoutSubviews, so every
// step must be safe to repeat.
- (void)resetPreview {
    self.previewLayer.frame = self.previewContainer.bounds;
    AVCaptureVideoOrientation orientation = [self captureVideoOrientation];
    if (self.previewLayer.connection.isVideoOrientationSupported) {
        self.previewLayer.connection.videoOrientation = orientation;
    }
    CGFloat w = CGRectGetWidth(self.previewContainer.bounds);
    CGFloat h = CGRectGetHeight(self.previewContainer.bounds);
    /**
     rectOfInterest
     In portrait, the x and y axes must be swapped.
     For LandscapeLeft/Right: x = 0 is on the side opposite the Home button.
     */
    // NOTE(review): this initial value is overwritten by every branch below;
    // kept only to preserve the original statement order.
    CGRect rect = CGRectMake(CGRectGetMinY(self.scanerView.frame) / h, CGRectGetMinX(self.scanerView.frame) / w, CGRectGetHeight(self.scanerView.frame) / h, CGRectGetWidth(self.scanerView.frame) / w);
    if (orientation == AVCaptureVideoOrientationLandscapeRight) {
        rect = CGRectMake(CGRectGetMinX(self.scanerView.frame) / w, CGRectGetMinY(self.scanerView.frame) / h, CGRectGetWidth(self.scanerView.frame) / w,CGRectGetHeight(self.scanerView.frame) / h);
    } else if (orientation == AVCaptureVideoOrientationLandscapeLeft){
        // Mirror both axes: the capture coordinate origin sits at the corner
        // opposite the Home button in this orientation.
        rect = CGRectMake(1 - CGRectGetMaxX(self.scanerView.frame) / w, 1 - CGRectGetMaxY(self.scanerView.frame) / h, CGRectGetWidth(self.scanerView.frame) / w,CGRectGetHeight(self.scanerView.frame) / h);
    } else {
        // Portrait: swap x/y and width/height into the unrotated capture space.
        rect = CGRectMake(CGRectGetMinY(self.scanerView.frame) / h, CGRectGetMinX(self.scanerView.frame) / w, CGRectGetHeight(self.scanerView.frame) / h, CGRectGetWidth(self.scanerView.frame) / w);
    }
    // Restricting the scan area is currently disabled: the full frame is scanned.
    // [self.output setRectOfInterest:rect];
    [self.view bringSubviewToFront:self.scanerView];
    // Center a 1pt-tall gradient "scan line" across the scan rect.
    CGRect scanlineFrame = CGRectMake(0, CGRectGetMidY(self.scanerView.bounds) - 0.5, CGRectGetWidth(self.scanerView.bounds), 1);
    self.scanLineLayer.frame = scanlineFrame;
    [self.scanerView.layer insertSublayer:self.scanLineLayer atIndex:0];
    // Even-odd fill: the outer path dims the whole mask view while the
    // appended scan-rect sub-path punches a clear hole over the scanner.
    UIBezierPath *path = [UIBezierPath bezierPathWithRect:self.maskView.bounds];
    UIBezierPath *subPath = [UIBezierPath bezierPathWithRect:self.scanerView.frame];
    [path appendPath:subPath];
    if (!self.maskLayer) {
        self.maskLayer = [CAShapeLayer layer];
        self.maskLayer.fillColor = [UIColor colorWithRed:0.2 green:0.2 blue:0.2 alpha:0.5].CGColor;
        self.maskLayer.fillRule = kCAFillRuleEvenOdd;
    }
    self.maskLayer.path = path.CGPath;
    // Thin white outline drawn around the scan rect.
    if (!self.rectLayer) {
        self.rectLayer = [CAShapeLayer layer];
        self.rectLayer.fillColor = [UIColor clearColor].CGColor;
        self.rectLayer.strokeColor = [UIColor whiteColor].CGColor;
        self.rectLayer.lineWidth = 0.5f;
    }
    self.rectLayer.path = subPath.CGPath;
    [self.maskLayer addSublayer:self.rectLayer];
    [self.maskView.layer addSublayer:self.maskLayer];
}
  176. - (CAGradientLayer *)scanLineLayer {
  177. if (!_scanLineLayer) {
  178. CAGradientLayer *gradientLayer = [CAGradientLayer layer];
  179. //set gradient colors
  180. // 数组成员接受 CGColorRef 类型的值
  181. gradientLayer.colors = @[(__bridge id)[UIColor colorWithRed:1 green:0 blue:0 alpha:0.2].CGColor,(__bridge id)[UIColor redColor].CGColor,(__bridge id)[UIColor colorWithRed:1 green:0 blue:0 alpha:0.2].CGColor];
  182. gradientLayer.locations = @[@2.5,@0.5,@0.75];
  183. gradientLayer.startPoint = CGPointMake(0, 0);
  184. gradientLayer.endPoint = CGPointMake(1, 0);
  185. _scanLineLayer = gradientLayer;
  186. }
  187. return _scanLineLayer;
  188. }
  189. #pragma mark - Private
  190. - (void)didCancel {
  191. if (self.QRCodeViewControllerDidCanceled) {
  192. self.QRCodeViewControllerDidCanceled(self);
  193. if (self.navigationController) {
  194. [self.navigationController setNavigationBarHidden:self.navigationBarStatus animated:NO];
  195. }
  196. }
  197. }
  198. #pragma mark - Init
  199. - (void)initCapture {
  200. self.scannerInitial = NO;
  201. if (![self camerAuthorization]) {
  202. NSDictionary* infoDict =[[NSBundle mainBundle] infoDictionary];
  203. NSString *appName = [infoDict objectForKey:@"CFBundleDisplayName"];
  204. if (!appName) {
  205. appName = [infoDict objectForKey:@"CFBundleName"];
  206. }
  207. __weak typeof(self) weakSelf = self;
  208. UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"Warning" message:[NSString stringWithFormat:@"Camera access denied, please change %@ setting, allow App use camera. (setting -> privacy -> camera enable %@)",[UIDevice currentDevice].model,appName] preferredStyle:UIAlertControllerStyleAlert];
  209. UIAlertAction *action = [UIAlertAction actionWithTitle:@"Ok" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
  210. [weakSelf didCancel];
  211. }];
  212. [alert addAction:action];
  213. [self presentViewController:alert animated:YES completion:nil];
  214. return;
  215. }
  216. self.device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  217. NSError *inputError;
  218. self.input = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:&inputError];
  219. if (inputError) {
  220. NSLog(@"init scanner input error: %@",inputError);
  221. return;
  222. }
  223. self.session = [[AVCaptureSession alloc] init];
  224. [self.session setSessionPreset:AVCaptureSessionPresetHigh];
  225. if ([self.session canAddInput:self.input]) {
  226. [self.session addInput:self.input];
  227. } else {
  228. NSLog(@"init scanner can't add input");
  229. return;
  230. }
  231. self.output = [[AVCaptureMetadataOutput alloc] init];
  232. [self.output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
  233. if ([self.session canAddOutput:self.output]) {
  234. [self.session addOutput:self.output];
  235. } else {
  236. NSLog(@"init scanner can't add output");
  237. return;
  238. }
  239. NSLog(@"%@", [_output availableMetadataObjectTypes]);
  240. NSString* log = [NSString stringWithFormat:@"%@ \n",[_output availableMetadataObjectTypes]];
  241. self.tvdbg_log.text = [self.tvdbg_log.text stringByAppendingString:log];
  242. self.output.metadataObjectTypes = @[
  243. AVMetadataObjectTypeQRCode,
  244. // AVMetadataObjectTypeEAN13Code,
  245. // AVMetadataObjectTypeEAN8Code,
  246. // AVMetadataObjectTypeUPCECode,
  247. // AVMetadataObjectTypeCode39Code,
  248. // AVMetadataObjectTypeCode39Mod43Code,
  249. // AVMetadataObjectTypeCode93Code,
  250. AVMetadataObjectTypeCode128Code
  251. // AVMetadataObjectTypePDF417Code
  252. ];
  253. log = [NSString stringWithFormat:@"set : \n %@ \n",self.output.metadataObjectTypes];
  254. self.tvdbg_log.text = [self.tvdbg_log.text stringByAppendingString:log];
  255. self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
  256. self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
  257. [self.previewContainer.layer addSublayer:self.previewLayer];
  258. self.scannerInitial = YES;
  259. }
  260. - (BOOL)camerAuthorization {
  261. AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
  262. if (status == AVAuthorizationStatusRestricted || status == AVAuthorizationStatusDenied) {
  263. return NO;
  264. }
  265. return YES;
  266. }
  267. #pragma mark - AVCaptureMetadataOutputObjectsDelegate
  268. - (void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
  269. if (!self.scannerEnable) {
  270. return;
  271. }
  272. NSString* log = [NSString stringWithFormat:@"%@ \n",@"didOutputMetadataObjects"];
  273. self.tvdbg_log.text = [self.tvdbg_log.text stringByAppendingString:log];
  274. if ([metadataObjects count] > 0) {
  275. [self.session stopRunning];
  276. AVMetadataMachineReadableCodeObject *metadataObject = [metadataObjects objectAtIndex:0];
  277. log = [NSString stringWithFormat:@"type %@ \n",metadataObject.type];
  278. self.tvdbg_log.text = [self.tvdbg_log.text stringByAppendingString:log];
  279. NSString *codeValue = metadataObject.stringValue;
  280. if (self.QRCodeViewControllerDidCompletion) {
  281. self.QRCodeViewControllerDidCompletion(self, codeValue);
  282. if (self.navigationController) {
  283. [self.navigationController setNavigationBarHidden:self.navigationBarStatus animated:NO];
  284. }
  285. }
  286. }
  287. }
  288. #pragma mark - Action
  289. - (IBAction)scannerBtnTouchDown:(UIButton *)sender {
  290. self.scannerEnable = YES;
  291. }
  292. - (IBAction)scannerBtnTouchUp:(UIButton *)sender {
  293. self.scannerEnable = NO;
  294. }
  295. - (IBAction)scannerBtnTouchUpOutSide:(UIButton *)sender {
  296. self.scannerEnable = NO;
  297. }
  298. - (IBAction)scannerBtnTouchCancel:(UIButton *)sender {
  299. self.scannerEnable = NO;
  300. }
  301. - (IBAction)backBtnClick:(UIButton *)sender {
  302. [self didCancel];
  303. }
  304. #pragma mark - Utils
  305. + (UIImage *)imageWithColor:(UIColor *)color Size:(CGSize)size {
  306. UIGraphicsBeginImageContextWithOptions(size, NO, [UIScreen mainScreen].scale);
  307. CGContextRef ctx = UIGraphicsGetCurrentContext();
  308. CGContextAddEllipseInRect(ctx, CGRectMake(5, 5, size.width - 10, size.height - 10));
  309. CGContextSetFillColorWithColor(ctx, color.CGColor);
  310. CGContextSetStrokeColorWithColor(ctx, [UIColor whiteColor].CGColor);
  311. CGContextSetLineWidth(ctx, 3.0f);
  312. CGContextDrawPath(ctx, kCGPathFillStroke);
  313. UIImage *img = UIGraphicsGetImageFromCurrentImageContext();
  314. UIGraphicsEndImageContext();
  315. return img;
  316. }
  317. @end