I have two files:
CameraViewController.h
CameraViewController.m
Unfortunately, the delegate method `captureOutput:didOutputSampleBuffer:fromConnection:` is never called. What is wrong with my solution — have I missed something? I can see the camera preview rendered in the UIView, so I am confident that everything except the captureOutput method works.
CameraViewController.h
Objective-C:
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
NS_ASSUME_NONNULL_BEGIN
/// View controller that drives the device camera: shows a live preview and
/// receives raw video frames via AVCaptureVideoDataOutputSampleBufferDelegate.
@interface CameraViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>
@end
NS_ASSUME_NONNULL_END
CameraViewController.m
Objective-C:
#import "CameraViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "AVCamRecorder.h"
@interface CameraViewController ()
// Owns the whole capture pipeline (input device -> outputs).
@property (nonatomic) AVCaptureSession *captureSession;
// Delivers raw BGRA sample buffers to the delegate queue set in viewDidLoad.
@property (nonatomic) AVCaptureVideoDataOutput *videoDataOutput;
// Renders the session's live preview; added as a sublayer of previewView.
@property (nonatomic) AVCaptureVideoPreviewLayer *videoPreviewLayer;
// Full-bleed container view hosting the preview layer.
@property (strong, nonatomic) UIView *previewView;
@end
@implementation CameraViewController

#pragma mark - View lifecycle

/// Builds the preview container and configures the capture session.
- (void)viewDidLoad {
    [super viewDidLoad];
    [self setupPreviewView];
    [self setupCaptureSession];
}

- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    [self.captureSession stopRunning];
}

#pragma mark - Private setup

/// Creates the full-bleed black view that hosts the preview layer.
- (void)setupPreviewView {
    self.previewView = [[UIView alloc] initWithFrame:self.view.bounds];
    self.previewView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
    [self.previewView setBackgroundColor:[UIColor blackColor]];
    [self.view addSubview:self.previewView];
    [self.view bringSubviewToFront:self.previewView];
}

/// Configures the session: camera input plus a BGRA video-data output, then
/// starts the live preview. Logs and bails out on any configuration failure.
- (void)setupCaptureSession {
    self.captureSession = [[AVCaptureSession alloc] init];
    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;

    // Create a VideoDataOutput delivering frames to a private serial queue.
    self.videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    dispatch_queue_t queue = dispatch_queue_create("myQueue", DISPATCH_QUEUE_SERIAL);
    [self.videoDataOutput setSampleBufferDelegate:self queue:queue];
    // kCVPixelFormatType_32BGRA is the Core Video constant documented for
    // kCVPixelBufferPixelFormatTypeKey (same 'BGRA' FourCC value as the old
    // Core Media spelling kCMPixelFormat_32BGRA).
    self.videoDataOutput.videoSettings =
        @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
    [self.videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];

    AVCaptureDevice *backCamera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (!backCamera) {
        NSLog(@"Unable to access back camera!");
        return;
    }

    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:backCamera
                                                                        error:&error];
    // Check the returned object, not the error pointer: Cocoa only guarantees
    // *error is meaningful when the call fails (returns nil).
    if (!input) {
        NSLog(@"Error Unable to initialize back camera: %@", error.localizedDescription);
        return;
    }

    if ([self.captureSession canAddInput:input] && [self.captureSession canAddOutput:self.videoDataOutput]) {
        [self.captureSession addInput:input];
        [self.captureSession addOutput:self.videoDataOutput];
        [self setupLivePreview];
    } else {
        // Previously a silent no-op: if the output can't be added, the sample
        // buffer delegate is never called. Make that failure visible.
        NSLog(@"Could not add camera input/output to the capture session.");
    }
}

/// Attaches the preview layer to previewView and starts the session.
- (void)setupLivePreview {
    self.videoPreviewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    if (self.videoPreviewLayer) {
        self.videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        self.videoPreviewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
        [self.previewView.layer addSublayer:self.videoPreviewLayer];
        // -startRunning blocks until the session starts, so keep it off main.
        dispatch_queue_t globalQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
        dispatch_async(globalQueue, ^{
            [self.captureSession startRunning];
            dispatch_async(dispatch_get_main_queue(), ^{
                self.videoPreviewLayer.frame = self.previewView.bounds;
            });
        });
    }
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

/// Invoked on the delegate queue for each captured frame.
/// NOTE(review): this selector spelling is the correct Objective-C one; if it
/// is never called, confirm canAddOutput: returned YES (see the else branch in
/// setupCaptureSession) and that camera permission was granted — TODO confirm.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    NSLog(@"captureOutput");
}

@end
Unfortunately, the delegate method `captureOutput:didOutputSampleBuffer:fromConnection:` is never called. What is wrong with my solution — have I missed something? I can see the camera preview rendered in the UIView, so I am confident that everything except the captureOutput method works.