本文介绍了AVCaptureStillImageOutput永远不会调用完成处理程序的处理方法,对大家解决问题具有一定的参考价值,需要的朋友们下面随着小编来一起学习吧!
问题描述
以下代码无效,问题出在哪里?
Following code doesn't work. Whats wrong?
// NOTE(review): this is the ORIGINAL, non-working snippet from the question,
// reformatted for readability only — the code tokens are unchanged. The two
// defects that stop the completion handler from firing are flagged below.
AVCaptureDevice * videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput * videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
AVCaptureSession * captureSession = [[AVCaptureSession alloc] init];
captureSession.sessionPreset = AVCaptureSessionPresetMedium;
if (![captureSession canAddInput:videoInput]) NSLog(@"Can't add input");
[captureSession addInput:videoInput];
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
[self.stillImageOutput setOutputSettings:@{AVVideoCodecKey:AVVideoCodecJPEG}];
// BUG(review): checks canAddOutput: against videoInput (the *input* object)
// instead of self.stillImageOutput, so this validation is meaningless.
if (![captureSession canAddOutput:videoInput]) NSLog(@"Can't add output");
[captureSession addOutput:self.stillImageOutput];
// BUG(review): the still-image capture is requested here, BEFORE
// [captureSession startRunning] below — presumably the connection is not yet
// delivering frames, so the completion handler never fires. See the accepted
// answer: set up and start the session first, then capture from a separate method.
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput.connections lastObject] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
    NSLog(@"!!!");
    if (imageDataSampleBuffer == NULL) {
        NSLog(@"%@", error);
        return;
    }
    NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
    UIImage *image = [[UIImage alloc] initWithData:imageData];
    self.imageView.image = image;
}];
// Creating preview layer
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.previewLayer.frame = self.view.layer.bounds;
[self.view.layer addSublayer:self.previewLayer];
[captureSession startRunning];
AVCaptureVideoPreviewLayer效果不错,但AVCaptureStillImageOutput不会调用完成处理程序...
AVCaptureVideoPreviewLayer works nice, but AVCaptureStillImageOutput does not call completion handler at all...
推荐答案:您需要在一个方法中完成会话的设置并启动,然后用一个单独的方法来执行捕获:
You need to set up & start your session in one method, then have a separate capture method :
/////////////////////////////////////////////////
////
//// Utility to find front camera
////
/////////////////////////////////////////////////

/// Returns the front-facing camera when one exists, otherwise falls back to
/// the default video capture device.
- (AVCaptureDevice *)frontFacingCameraIfAvailable {
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in videoDevices) {
        if (device.position == AVCaptureDevicePositionFront) {
            return device;
        }
    }
    // Couldn't find one on the front, so just get the default video device.
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}

/////////////////////////////////////////////////
////
//// Setup Session, attach Video Preview Layer
//// and Capture Device, start running session
////
/////////////////////////////////////////////////

/// Builds the capture session, attaches the preview layer and the still-image
/// output, and starts the session. Call once (e.g. from viewDidLoad) before
/// -captureNow.
- (void)setupCaptureSession {
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetMedium;

    AVCaptureVideoPreviewLayer *captureVideoPreviewLayer =
        [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    // FIX(review): the original never set a frame, so the preview layer was
    // zero-sized and invisible.
    captureVideoPreviewLayer.frame = self.view.layer.bounds;
    [self.view.layer addSublayer:captureVideoPreviewLayer];

    NSError *error = nil;
    AVCaptureDevice *device = [self frontFacingCameraIfAvailable];
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // FIX(review): the original logged the error but still called
        // addInput: with nil, which raises NSInvalidArgumentException.
        NSLog(@"ERROR: trying to open camera: %@", error);
        return;
    }
    [session addInput:input];

    self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    [self.stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
    [session addOutput:self.stillImageOutput];

    [session startRunning];
}

/////////////////////////////////////////////////
////
//// Method to capture Still Image from
//// Video Preview Layer
////
/////////////////////////////////////////////////

/// Captures a still image from the running session and hands the decoded
/// UIImage to -displayImage:. Must be called after -setupCaptureSession.
- (void)captureNow {
    // Locate the output connection that carries video.
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in self.stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }

    NSLog(@"about to request a capture from: %@", self.stillImageOutput);
    __weak typeof(self) weakSelf = self;
    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection
                                                       completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
        // FIX(review): the original passed a possibly-NULL buffer straight to
        // jpegStillImageNSDataRepresentation:, which throws on capture failure.
        if (imageSampleBuffer == NULL) {
            NSLog(@"Still image capture failed: %@", error);
            return;
        }
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
        UIImage *image = [[UIImage alloc] initWithData:imageData];
        [weakSelf displayImage:image];
    }];
}
更多推荐
AVCaptureStillImageOutput永远不会调用完成处理程序
发布评论