/* Copyright 2013 Scott Logic Ltd Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */

#import "ScannerViewController.h"
@import AVFoundation;
// NOTE(review): the original file contained a bare "#import" whose argument was
// lost (angle-bracket content stripped by a tooling pass). AudioToolbox is
// required for the AudioServices* calls in -playSound below — confirm the
// original header name against version control.
#import <AudioToolbox/AudioToolbox.h>
#import "SCShapeView.h"
#import "ScannerControllerView.h"
//#import "DetailViewController.h"
#import "const.h"
#import "AppDelegate.h"
//#import "MainViewController.h"
//#import "ContactListViewController.h"
//#import "CartUtils.h"

// Scan-result routing targets used by callers of this screen.
#define SCANNER_TARGET_DETAIL 0
#define SCANNER_TARGET_CART 1
//#import "ScannerControlViewController.h"

// Private state for the barcode/QR scanner screen.
@interface ScannerViewController () {
    AVCaptureVideoPreviewLayer *_previewLayer;  // live camera preview, inserted at layer index 0
    SCShapeView *_boundingBox;                  // outlines the detected code on screen
    NSTimer *_boxHideTimer;                     // hides _boundingBox after a delay
    UILabel *_decodedMessage;                   // bottom strip showing the decoded payload
}
@end

@implementation ScannerViewController

// Autorotation is disabled; rotation is handled manually in
// -didRotateFromInterfaceOrientation: so the preview layer is resized exactly once.
- (BOOL)shouldAutorotate
{
    return NO;
}

// Re-centers the camera preview layer and re-orients its capture connection
// after the device rotates.
// NOTE(review): deprecated since iOS 8 (viewWillTransitionToSize:... is the
// replacement); kept because this class targets the legacy rotation pipeline.
- (void)didRotateFromInterfaceOrientation:(UIInterfaceOrientation)fromInterfaceOrientation
{
    [super didRotateFromInterfaceOrientation:fromInterfaceOrientation];
    CGRect rect = self.view.bounds;
    DebugLog(@"%@", NSStringFromCGRect(self.focusZone.frame));
    // Animate the preview layer into its new bounds/position.
    [CATransaction begin];
    [CATransaction setAnimationDuration:0.5];
    [CATransaction setAnimationTimingFunction:
        [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut]];
    _previewLayer.position = CGPointMake(CGRectGetMidX(rect), CGRectGetMidY(rect));
    _previewLayer.bounds = rect;
    // Keep the capture connection's video orientation in sync with the UI.
    UIInterfaceOrientation orientation = [[UIApplication sharedApplication] statusBarOrientation];
    switch (orientation) {
        case UIInterfaceOrientationPortrait:
            _previewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
            break;
        case UIInterfaceOrientationPortraitUpsideDown:
            _previewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
            break;
        case UIInterfaceOrientationLandscapeLeft:
            _previewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
            break;
        case UIInterfaceOrientationLandscapeRight:
            _previewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
            break;
        default:
            break;
    }
    [CATransaction commit];
}

// Plays the bundled "softScanBeep.wav" as a system sound.
// FIX: the original created a fresh SystemSoundID on every call and never
// released the CFURLRef returned by CFBundleCopyResourceURL, leaking both the
// URL and the sound object per scan. The sound is now created once and cached
// for the process lifetime; the URL is released per the CF Create/Copy rule.
- (void)playSound
{
    static SystemSoundID soundFileObject = 0;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        CFURLRef soundFileURLRef = CFBundleCopyResourceURL(CFBundleGetMainBundle(),
                                                           CFSTR("softScanBeep"),
                                                           CFSTR("wav"),
                                                           NULL);
        if (soundFileURLRef != NULL) {
            AudioServicesCreateSystemSoundID(soundFileURLRef, &soundFileObject);
            // We own the URL returned by a "Copy" function; release it.
            CFRelease(soundFileURLRef);
        }
    });
    if (soundFileObject != 0) {
        AudioServicesPlaySystemSound(soundFileObject);
    }
}

- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    // Make sure the navigation bar (carrying the back item) is visible.
    [[self navigationController] setNavigationBarHidden:NO animated:NO];
}

// Pops back (no animation) when the navigation-bar back item is tapped.
- (void)onBackClick:(UIButton *)sender
{
    [self.navigationController popViewControllerAnimated:NO];
}

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Custom "back" image button routed to -onBackClick:.
    UIBarButtonItem *closeButton =
        [[UIBarButtonItem alloc] initWithImage:[[UIImage imageNamed:@"back"] imageWithRenderingMode:UIImageRenderingModeAutomatic]
                                         style:UIBarButtonItemStylePlain
                                        target:self
                                        action:@selector(onBackClick:)];
    self.navigationItem.leftBarButtonItem = closeButton;
}

- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    // self.focusZone.frame is only correct here, after layout has run;
    // reading it earlier (viewDidLoad) yields a stale frame.
    self.backgroundView.highLightRect = CGRectInset(self.focusZone.frame, 6, 6);
    [self.backgroundView setNeedsDisplay];
    // Ignore metadata callbacks until scanning is explicitly enabled.
    self.handelOutput = false;
    // Do any additional setup after loading the view, typically from a nib.
    // Create a new AVCaptureSession
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetHigh;
    // Default video capture device (the back camera on most hardware).
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // float factor = device.videoZoomFactor;
    // float up = device.activeFormat.videoZoomFactorUpscaleThreshold;
    // [device lockForConfiguration:nil];
    // device.videoZoomFactor = device.activeFormat.videoZoomFactorUpscaleThreshold;
    // [device unlockForConfiguration];
    NSError *error = nil;
    // Want the normal device
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if(input) {
        // Add the input to the session
        [session addInput:input];
    } else {
        // No camera input: decide whether the hardware is missing or the user
        // denied permission, present the matching alert, and abort setup.
        __weak typeof(self) weakself = self;
        // UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"Warning" message:@"Camera access denied, please change iPad setting, allow App use camara. (setting -> privacy -> camera enable RA Image)" preferredStyle:UIAlertControllerStyleAlert];
        // UIAlertAction *action = [UIAlertAction actionWithTitle:@"Ok" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
        // [weakself dismissViewControllerAnimated:YES completion:nil];
        // }];
        // [alert addAction:action];
        // [self presentViewController:alert animated:YES completion:nil];
        if (![UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]) {
            // Camera hardware is not available on this device.
            UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"Warning" message:@"Camera is not available" preferredStyle:UIAlertControllerStyleAlert];
            UIAlertAction *action = [UIAlertAction actionWithTitle:@"Ok" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
                [weakself dismissViewControllerAnimated:YES completion:nil];
            }];
            [alert addAction:action];
            [self presentViewController:alert animated:YES completion:nil];
            return;
        }
        // Camera permission status.
        AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
        if (authStatus ==AVAuthorizationStatusRestricted ||// access restricted (e.g. parental controls)
            authStatus ==AVAuthorizationStatusDenied) // the user explicitly denied camera access
        {
            // No permission — tell the user how to enable it in Settings.
            // NSURL *url = [NSURL URLWithString:UIApplicationOpenSettingsURLString];
            // if ([[UIApplication sharedApplication]canOpenURL:url]) {
            // [[UIApplication sharedApplication]openURL:url];// returning to the app after toggling the permission relaunches it
            // } else {
            NSDictionary* infoDict =[[NSBundle mainBundle] infoDictionary];
            NSString *appName = [infoDict objectForKey:@"CFBundleName"];
            UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"Warning" message:[NSString stringWithFormat:@"Camera access denied, please change %@ setting, allow App use camera. (setting -> privacy -> camera enable %@)",[UIDevice currentDevice].model,appName] preferredStyle:UIAlertControllerStyleAlert];
            UIAlertAction *action = [UIAlertAction actionWithTitle:@"Ok" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
                [weakself dismissViewControllerAnimated:YES completion:nil];
            }];
            [alert addAction:action];
            [self presentViewController:alert animated:YES completion:nil];
            // }
            return;
        }
        // Any other status (e.g. not yet determined): give up for this pass.
        return;
    }
    AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];
    // Have to add the output before setting metadata types
    [session addOutput:output];
    // What different things can we register to recognise?
    DebugLog(@"%@", [output availableMetadataObjectTypes]);
    // We're only interested in QR Codes
    // (plus the common 1D/2D symbologies listed below).
    [output setMetadataObjectTypes:@[AVMetadataObjectTypeUPCECode,AVMetadataObjectTypeCode39Code,AVMetadataObjectTypeCode39Mod43Code,AVMetadataObjectTypeEAN13Code,AVMetadataObjectTypeEAN8Code,AVMetadataObjectTypeCode93Code,AVMetadataObjectTypeCode128Code,AVMetadataObjectTypePDF417Code,AVMetadataObjectTypeQRCode,AVMetadataObjectTypeAztecCode]];
    DebugLog(@"%@", [output metadataObjectTypes]);
    // NSArray* supporttype=output.availableMetadataObjectTypes;
    // [output setMetadataObjectTypes:output.availableMetadataObjectTypes];
    // This VC is the delegate. Please call us on the main queue
    [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    //
    // CGRect rt1 = self.ScannerControl.scannerZone.frame;
    // Display on screen
    _previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
    _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    // _previewLayer.orientation= AVCaptureVideoOrientationLandscapeRight;
    // Match the capture connection's video orientation to the current UI
    // orientation (same mapping as -didRotateFromInterfaceOrientation:).
    UIInterfaceOrientation orientation = [[UIApplication sharedApplication]statusBarOrientation];
    switch (orientation) {
            // UIInterfaceOrientationPortrait = UIDeviceOrientationPortrait,
            // UIInterfaceOrientationPortraitUpsideDown = UIDeviceOrientationPortraitUpsideDown,
            // UIInterfaceOrientationLandscapeLeft = UIDeviceOrientationLandscapeRight,
            // UIInterfaceOrientationLandscapeRight
            // AVCaptureVideoOrientationPortrait = 1,
            // AVCaptureVideoOrientationPortraitUpsideDown = 2,
            // AVCaptureVideoOrientationLandscapeRight = 3,
            // AVCaptureVideoOrientationLandscapeLeft = 4,
        case UIInterfaceOrientationPortrait:
            _previewLayer.connection.videoOrientation=AVCaptureVideoOrientationPortrait;
            break;
        case UIInterfaceOrientationPortraitUpsideDown:
            _previewLayer.connection.videoOrientation=AVCaptureVideoOrientationPortraitUpsideDown;
            break;
        case UIInterfaceOrientationLandscapeLeft:
            _previewLayer.connection.videoOrientation=AVCaptureVideoOrientationLandscapeLeft;
            break;
        case UIInterfaceOrientationLandscapeRight:
            _previewLayer.connection.videoOrientation=AVCaptureVideoOrientationLandscapeRight;
            break;
        default:
            break;
    }
    // Fill the whole view with the preview.
    CGRect rect=self.view.bounds;
    // rect.origin.y=rect.origin.y+40;
    // rect.size.height = rect.size.height-40;
    _previewLayer.bounds = rect;
    _previewLayer.position = CGPointMake(CGRectGetMidX(self.view.bounds), CGRectGetMidY(self.view.bounds));
    // [self.view.layer addSublayer:_previewLayer];
    // Insert beneath all subviews so the overlay chrome stays on top.
    [self.view.layer insertSublayer:_previewLayer atIndex:0];
    // [self.view addSubview:self.backgroundView];
    [self.view bringSubviewToFront:self.backgroundView];
    // [self.view addSubview:self.ScannerControl.view];
    // CGRect rt = self.ScannerControl.scannerZone.bounds;
    // CGRect rt1 =output.rectOfInterest;
    // output.rectOfInterest = CGRectMake(0, 0, 300, 300); //self.ScannerControl.scannerZone.frame;
    // ScannerControllerView* view =[[ ScannerControllerView alloc] initWithFrame:self.view.frame];
    // view.backgroundColor = [UIColor clearColor];
    // ScannerLayer * layer = [[ScannerLayer alloc] init];
    // [self.view.layer addSublayer:layer];
    // Add the view to draw the bounding box for the UIView
    _boundingBox = [[SCShapeView alloc] initWithFrame:self.view.bounds];
    _boundingBox.backgroundColor = [UIColor clearColor];
    _boundingBox.hidden = YES;
    [self.view insertSubview:_boundingBox belowSubview:self.backgroundView];
    // Add a label to display the resultant message
    _decodedMessage = [[UILabel alloc] initWithFrame:CGRectMake(0, CGRectGetHeight(self.view.bounds) - 75, CGRectGetWidth(self.view.bounds), 75)];
    _decodedMessage.numberOfLines = 0;
    _decodedMessage.backgroundColor = [UIColor colorWithWhite:0.8 alpha:0.9];
    _decodedMessage.textColor = [UIColor darkGrayColor];
    _decodedMessage.textAlignment = NSTextAlignmentCenter;
    [self.view addSubview:_decodedMessage];
    _decodedMessage.userInteractionEnabled = YES;
    // "Cancel" button overlaid on the message strip (bottom-right).
    UIButton *backBtn = [UIButton buttonWithType:UIButtonTypeCustom];
    [backBtn setTitle:@"Cancel" forState:UIControlStateNormal];
    backBtn.titleLabel.backgroundColor = [UIColor clearColor];
    backBtn.titleLabel.font = [UIFont systemFontOfSize: 18.0];
    [backBtn setTitleColor:UIColorFromRGB(0x007aff) forState:UIControlStateNormal];
    // Bottom-right corner, same height as the decoded-message strip.
    backBtn.frame = CGRectMake(CGRectGetMaxX(self.view.frame) - 80, CGRectGetMaxY(self.view.frame)-CGRectGetHeight(_decodedMessage.frame), 60, CGRectGetHeight(_decodedMessage.frame));
    [self.view addSubview:backBtn];
    [backBtn addTarget:self action:@selector(backButtonClick:) forControlEvents:UIControlEventTouchUpInside];
    // Drop shadow and border styling for the scan trigger button.
    self.scanButton.layer.shadowColor = [UIColor blackColor].CGColor;
    self.scanButton.layer.shadowOffset = CGSizeMake(0, 0);
    self.scanButton.layer.shadowOpacity = 0.5;
    self.scanButton.layer.shadowRadius = 2.0;
    self.scanButton.layer.borderColor = [[UIColor darkGrayColor] CGColor];
    self.scanButton.layer.borderWidth = 15;
    // Start the AVSession running
    [session startRunning];
}

// Dismisses the (modally presented) scanner when Cancel is tapped.
- (void)backButtonClick:(UIButton *)sender {
    [self dismissViewControllerAnimated:YES completion:nil];
}

//#pragma mark - ScannerControllerDelegate
//-(void)BeginScan:(bool)begin
//{
// self.handelOutput = begin;
//}

#pragma mark - AVCaptureMetadataOutputObjectsDelegate
// Delegate callback (dispatched on the main queue) for each batch of detected
// machine-readable codes. Scans the batch for codes whose on-screen center
// lies inside self.focusZone, tracking the one closest to the zone's center.
// NOTE(review): this method continues past the end of the visible chunk.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    // Drop callbacks while scanning is disabled (set in viewDidAppear:).
    if(!self.handelOutput) return;
    AVMetadataObject * cadedate = nil;  // best candidate code found so far
    CGPoint centerzone =CGPointMake(CGRectGetMidX(self.focusZone.frame), CGRectGetMidY(self.focusZone.frame));
    float distance = MAXFLOAT;  // squared distance of the best candidate to the zone center
    for (AVMetadataObject *metadata in metadataObjects) {
        DebugLog(@"%@",metadata);
        // if ([metadata.type isEqualToString:AVMetadataObjectTypeQRCode]) {
        // Transform the meta-data coordinates to screen coords
        AVMetadataMachineReadableCodeObject *transformed = (AVMetadataMachineReadableCodeObject *)[_previewLayer transformedMetadataObjectForMetadataObject:metadata];
        // Update the frame on the _boundingBox view, and show it
        CGRect rt = transformed.bounds;
        CGRectGetMidX(rt);  // (result unused — leftover no-op statement)
        // Center of the detected code in view coordinates.
        CGPoint centermeta =CGPointMake(CGRectGetMidX(rt), CGRectGetMidY(rt));
        if(CGRectContainsPoint(self.focusZone.frame, centermeta)) {
            if(cadedate == nil) cadedate = metadata;
            // Squared distance from the code center to the focus-zone center.
            float distancemeta = (centermeta.x-centerzone.x)*(centermeta.x-centerzone.x)+(centermeta.y-centerzone.y)*(centermeta.y-centerzone.y);
            if(distancemeta