Added format and framerate selectors for AVFoundation capture devices. This is mostly so Blackmagic devices work without resorting to the QTCapture type.

Reworked how capture types/devices are handled internally, and how settings are applied.
Moved from a timer to an event-driven model: frames are now processed as soon as the capture device/API provides them.
Zakk 2013-02-01 03:02:23 -05:00
parent 4833ac2ca6
commit c59095d0e5
17 changed files with 1615 additions and 625 deletions
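Before the per-file diffs, a minimal sketch of the event-driven path described above, assuming only the standard AVCaptureVideoDataOutputSampleBufferDelegate callback plus the captureOutputVideo:didOutputSampleBuffer:didOutputImage:frameTime: method that appears later in this diff (the enclosing class and its videoDelegate property are stand-ins, not the exact commit code):

// Hedged sketch: AVFoundation calls this on the capture queue for every frame,
// so downstream processing is driven by the device instead of a repeating NSTimer.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (videoFrame)
    {
        CVPixelBufferRetain(videoFrame);
        // CocoaSplit's own delegate protocol, as used in AVFCapture.m below.
        [self.videoDelegate captureOutputVideo:nil didOutputSampleBuffer:nil didOutputImage:videoFrame frameTime:0];
        CVPixelBufferRelease(videoFrame);
    }
}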


@@ -0,0 +1,15 @@
//
// AVCaptureDeviceFormat+CocoaSplitAdditions.h
// CocoaSplit
//
// Created by Zakk on 1/19/13.
// Copyright (c) 2013 Zakk. All rights reserved.
//
#import <AVFoundation/AVFoundation.h>
@interface AVCaptureDeviceFormat (CocoaSplitAdditions)
@property (readonly) NSString *localizedName;
@end


@@ -0,0 +1,22 @@
//
// AVCaptureDeviceFormat+CocoaSplitAdditions.m
// CocoaSplit
//
// Created by Zakk on 1/19/13.
// Copyright (c) 2013 Zakk. All rights reserved.
//
#import "AVCaptureDeviceFormat+CocoaSplitAdditions.h"
@implementation AVCaptureDeviceFormat (CocoaSplitAdditions)
-(NSString *)localizedName
{
NSString *localizedName = nil;
CFStringRef formatName = CMFormatDescriptionGetExtension([self formatDescription], kCMFormatDescriptionExtension_FormatName);
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions((CMVideoFormatDescriptionRef)[self formatDescription]);
localizedName = [NSString stringWithFormat:@"%@, %d x %d", formatName, dimensions.width, dimensions.height];
return localizedName;
}
@end
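A hedged usage sketch for this category (the names device and formatPopup are hypothetical, not part of the commit): each AVCaptureDeviceFormat gets a display string built from the format description's FormatName extension plus its dimensions, something like "HD 1080p, 1920 x 1080", which is what the new format selector can show directly.

// Hypothetical caller; 'device' is an AVCaptureDevice, 'formatPopup' an NSPopUpButton.
for (AVCaptureDeviceFormat *format in device.formats)
{
    [formatPopup addItemWithTitle:format.localizedName];
}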


@@ -36,6 +36,8 @@
347B7FC616907A1700B5F4B3 /* libbz2.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 347B7FC516907A1700B5F4B3 /* libbz2.dylib */; };
347B7FC816907AE700B5F4B3 /* VideoDecodeAcceleration.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 347B7FC716907AE700B5F4B3 /* VideoDecodeAcceleration.framework */; };
347B7FCA16907AFA00B5F4B3 /* libz.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 347B7FC916907AFA00B5F4B3 /* libz.dylib */; };
3498C78B16AB6688003D8065 /* AVCaptureDeviceFormat+CocoaSplitAdditions.m in Sources */ = {isa = PBXBuildFile; fileRef = 3498C78A16AB6688003D8065 /* AVCaptureDeviceFormat+CocoaSplitAdditions.m */; };
3498C78F16AB6853003D8065 /* AVFrameRateRange+CocoaSplitAdditions.m in Sources */ = {isa = PBXBuildFile; fileRef = 3498C78E16AB6853003D8065 /* AVFrameRateRange+CocoaSplitAdditions.m */; };
34A64A2F165EFE4C00A68428 /* PreviewView.m in Sources */ = {isa = PBXBuildFile; fileRef = 34A64A2E165EFE4B00A68428 /* PreviewView.m */; };
34A64A35165F047900A68428 /* CapturePreview.xib in Resources */ = {isa = PBXBuildFile; fileRef = 34A64A34165F047900A68428 /* CapturePreview.xib */; };
34A64A37165F208800A68428 /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 34A64A36165F208800A68428 /* QuartzCore.framework */; };
@@ -140,6 +142,10 @@
347B7FC516907A1700B5F4B3 /* libbz2.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libbz2.dylib; path = usr/lib/libbz2.dylib; sourceTree = SDKROOT; };
347B7FC716907AE700B5F4B3 /* VideoDecodeAcceleration.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = VideoDecodeAcceleration.framework; path = System/Library/Frameworks/VideoDecodeAcceleration.framework; sourceTree = SDKROOT; };
347B7FC916907AFA00B5F4B3 /* libz.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libz.dylib; path = usr/lib/libz.dylib; sourceTree = SDKROOT; };
3498C78916AB6688003D8065 /* AVCaptureDeviceFormat+CocoaSplitAdditions.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "AVCaptureDeviceFormat+CocoaSplitAdditions.h"; path = "../AVCaptureDeviceFormat+CocoaSplitAdditions.h"; sourceTree = "<group>"; };
3498C78A16AB6688003D8065 /* AVCaptureDeviceFormat+CocoaSplitAdditions.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "AVCaptureDeviceFormat+CocoaSplitAdditions.m"; path = "../AVCaptureDeviceFormat+CocoaSplitAdditions.m"; sourceTree = "<group>"; };
3498C78D16AB6853003D8065 /* AVFrameRateRange+CocoaSplitAdditions.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "AVFrameRateRange+CocoaSplitAdditions.h"; sourceTree = "<group>"; };
3498C78E16AB6853003D8065 /* AVFrameRateRange+CocoaSplitAdditions.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "AVFrameRateRange+CocoaSplitAdditions.m"; sourceTree = "<group>"; };
34A64A2D165EFE4B00A68428 /* PreviewView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = PreviewView.h; sourceTree = "<group>"; };
34A64A2E165EFE4B00A68428 /* PreviewView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = PreviewView.m; sourceTree = "<group>"; };
34A64A34165F047900A68428 /* CapturePreview.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = CapturePreview.xib; sourceTree = "<group>"; };
@@ -268,6 +274,8 @@
340FE4A715F3417E00E4CE4E /* CocoaSplit */ = {
isa = PBXGroup;
children = (
3498C78916AB6688003D8065 /* AVCaptureDeviceFormat+CocoaSplitAdditions.h */,
3498C78A16AB6688003D8065 /* AVCaptureDeviceFormat+CocoaSplitAdditions.m */,
340FE4B315F3417E00E4CE4E /* AppDelegate.h */,
340FE4B415F3417E00E4CE4E /* AppDelegate.m */,
342346D315FA301600C8C77E /* SyphonCapture.h */,
@@ -290,6 +298,8 @@
340FE50615F444AA00E4CE4E /* CaptureController.h */,
340FE50715F444AA00E4CE4E /* CaptureController.m */,
340FE4B615F3417E00E4CE4E /* MainMenu.xib */,
3498C78D16AB6853003D8065 /* AVFrameRateRange+CocoaSplitAdditions.h */,
3498C78E16AB6853003D8065 /* AVFrameRateRange+CocoaSplitAdditions.m */,
34A64A34165F047900A68428 /* CapturePreview.xib */,
34539ED61606E91700638A29 /* StreamServicePanel.xib */,
34539ED8160734EA00638A29 /* FilePanel.xib */,
@@ -510,6 +520,8 @@
34792AF316103FF60065A859 /* DesktopCapture.m in Sources */,
34B74BC91648D23B00818DE2 /* QTCapture.m in Sources */,
34A64A2F165EFE4C00A68428 /* PreviewView.m in Sources */,
3498C78B16AB6688003D8065 /* AVCaptureDeviceFormat+CocoaSplitAdditions.m in Sources */,
3498C78F16AB6853003D8065 /* AVFrameRateRange+CocoaSplitAdditions.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};


@@ -23,24 +23,31 @@
AVCaptureVideoDataOutput *_video_capture_output;
AVCaptureAudioDataOutput *_audio_capture_output;
CVImageBufferRef _currentFrame;
AVCaptureDevice *_selectedVideoCaptureDevice;
}
@property (strong) AVCaptureDevice *videoInputDevice;
@property (strong) AVCaptureDevice *audioInputDevice;
@property (readonly) NSArray *availableVideoDevices;
@property int videoCaptureFPS;
@property int width;
@property int height;
@property id videoDelegate;
@property (strong) id audioDelegate;
@property (strong) id videoDelegate;
@property (assign) int videoCaptureFPS;
@property (assign) int audioBitrate;
@property (assign) int audioSamplerate;
@property (assign) int videoHeight;
@property (assign) int videoWidth;
@property NSArray *videoFormats;
@property NSArray *videoFramerates;
@property id activeAudioDevice;
@property AVCaptureDeviceFormat *activeVideoFormat;
@property AVFrameRateRange *activeVideoFramerate;
@property AbstractCaptureDevice *activeVideoDevice;


@@ -12,6 +12,10 @@
@implementation AVFCapture
@synthesize activeVideoFormat = _activeVideoFormat;
@synthesize activeVideoDevice = _activeVideoDevice;
@synthesize activeVideoFramerate = _activeVideoFramerate;
-(void) setVideoDimensions:(int)width height:(int)height
{
@@ -33,24 +37,63 @@
}
-(bool) setActiveAudioDevice:(id)audioDevice
-(AVFrameRateRange *)activeVideoFramerate
{
_audioInputDevice = audioDevice;
return YES;
return _activeVideoFramerate;
}
-(void) setActiveVideoFramerate:(AVFrameRateRange *)activeVideoFramerate
{
_activeVideoFramerate = activeVideoFramerate;
//TODO: ERROR HANDLING
/* [self.activeVideoDevice lockForConfiguration:nil];
self.activeVideoDevice.activeVideoMinFrameDuration = _activeVideoFramerate.minFrameDuration;
[self.activeVideoDevice unlockForConfiguration];
*/
self.videoCaptureFPS = _activeVideoFramerate.minFrameRate;
}
-(bool) setActiveVideoDevice:(AbstractCaptureDevice *)newDev
-(AVCaptureDeviceFormat *) activeVideoFormat
{
_videoInputDevice = [newDev captureDevice];
return YES;
return _activeVideoFormat;
}
-(void) setActiveVideoFormat:(id)activeVideoFormat
{
_activeVideoFormat = activeVideoFormat;
//TODO: Error handling here
/* [self.activeVideoDevice lockForConfiguration:nil];
self.activeVideoDevice.activeFormat = _activeVideoFormat;
[self.activeVideoDevice unlockForConfiguration];
*/
self.videoFramerates = self.activeVideoFormat.videoSupportedFrameRateRanges;
}
-(id) activeVideoDevice
{
return _activeVideoDevice;
}
-(void) setActiveVideoDevice:(AbstractCaptureDevice *)newDev
{
_activeVideoDevice = newDev;
_selectedVideoCaptureDevice = [newDev captureDevice];
self.videoFormats = _selectedVideoCaptureDevice.formats;
self.videoFramerates = _selectedVideoCaptureDevice.activeFormat.videoSupportedFrameRateRanges;
}
-(NSArray *) availableVideoDevices
{
@@ -76,55 +119,20 @@
{
if (_capture_session)
{
[_capture_session stopRunning];
[_capture_session stopRunning];
/*
_capture_session = nil;
_video_capture_queue = nil;
_videoInputDevice = nil;
self.activeVideoDevice = nil;
_video_capture_output = nil;
_audio_capture_output = nil;
_audioInputDevice = nil;
self.activeAudioDevice = nil;
_audio_capture_queue = nil;
*/
}
return YES;
}
/*
-(void)grabPhoto
{
if (!_staticImage)
{
AVCaptureConnection *av_conn;
av_conn = [_capture_output connectionWithMediaType:AVMediaTypeVideo];
[_capture_output captureStillImageAsynchronouslyFromConnection:av_conn completionHandler:^(CMSampleBufferRef sampleBuffer, NSError *error) {
CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
//Should I copy the Image Buffer? instead of just retaining it?
CVPixelBufferRetain(videoFrame);
[_videoDelegate captureOutputVideo:self didOutputSampleBuffer:sampleBuffer didOutputImage:videoFrame];
_staticImage = videoFrame;
//CVPixelBufferRelease(videoFrame);
}];
} else {
[_videoDelegate captureOutputVideo:self didOutputSampleBuffer:nil didOutputImage:_staticImage];
}
}
*/
-(bool) startCaptureSession:(NSError **)error
{
@@ -152,6 +160,20 @@
[_capture_session startRunning];
[_selectedVideoCaptureDevice lockForConfiguration:nil];
if (self.activeVideoFormat)
{
_selectedVideoCaptureDevice.activeFormat = self.activeVideoFormat;
}
if (self.activeVideoFramerate)
{
_selectedVideoCaptureDevice.activeVideoMinFrameDuration = self.activeVideoFramerate.minFrameDuration;
}
[_selectedVideoCaptureDevice unlockForConfiguration];
return YES;
}
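The commented-out blocks above carry //TODO: ERROR HANDLING markers, and the live code passes nil to lockForConfiguration:. A hedged sketch of what that handling could look like, using only the documented AVCaptureDevice API (lockForConfiguration: returns NO and fills the NSError out-parameter on failure):

NSError *lockError = nil;
if ([_selectedVideoCaptureDevice lockForConfiguration:&lockError])
{
    if (self.activeVideoFormat)
    {
        _selectedVideoCaptureDevice.activeFormat = self.activeVideoFormat;
    }
    if (self.activeVideoFramerate)
    {
        _selectedVideoCaptureDevice.activeVideoMinFrameDuration = self.activeVideoFramerate.minFrameDuration;
    }
    [_selectedVideoCaptureDevice unlockForConfiguration];
}
else
{
    // Assumption: surfacing the failure via NSLog; the commit does not specify a policy.
    NSLog(@"Could not lock %@ for configuration: %@", _selectedVideoCaptureDevice, lockError);
}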
@@ -164,6 +186,7 @@
AVCaptureDeviceInput *video_capture_input;
AVCaptureDeviceInput *audio_capture_input;
if (_capture_session)
return YES;
@@ -176,7 +199,7 @@
if (_videoDelegate)
{
if (!_videoInputDevice)
if (!self.activeVideoDevice)
{
NSLog(@"No video input device");
*therror = [NSError errorWithDomain:@"videoCapture" code:100 userInfo:@{NSLocalizedDescriptionKey : @"Must select video capture device first"}];
@@ -186,7 +209,7 @@
_capture_session = [[AVCaptureSession alloc] init];
video_capture_input = [AVCaptureDeviceInput deviceInputWithDevice:_videoInputDevice error:therror];
video_capture_input = [AVCaptureDeviceInput deviceInputWithDevice:_selectedVideoCaptureDevice error:therror];
if (!video_capture_input)
{
@@ -205,18 +228,16 @@
}
NSMutableDictionary *videoSettings = [[NSMutableDictionary alloc] init];
[videoSettings setValue:@[@(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange), @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), @(kCVPixelFormatType_422YpCbCr8)] forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
NSDictionary *ioAttrs = [NSDictionary dictionaryWithObject: [NSNumber numberWithBool: YES]
forKey: (NSString *)kIOSurfaceIsGlobal];
[videoSettings setValue:ioAttrs forKey:(NSString *)kCVPixelBufferIOSurfacePropertiesKey];
if (self.videoHeight && self.videoWidth)
/* if (self.videoHeight && self.videoWidth)
{
[videoSettings setValue:@(self.videoHeight) forKey:(NSString *)kCVPixelBufferHeightKey];
[videoSettings setValue:@(self.videoWidth) forKey:(NSString *)kCVPixelBufferWidthKey];
}
} */
NSLog(@"SETTINGS DICT %@", videoSettings);
_video_capture_output = [[AVCaptureVideoDataOutput alloc] init];
@@ -233,14 +254,20 @@
return NO;
}
AVCaptureConnection *outconn = [_video_capture_output connectionWithMediaType:AVMediaTypeVideo];
if (outconn && self.videoCaptureFPS && self.videoCaptureFPS > 0)
{
outconn.videoMinFrameDuration = CMTimeMake(1, self.videoCaptureFPS);
}
}
if (_audioDelegate)
{
if (_audioInputDevice)
if (self.activeAudioDevice)
{
audio_capture_input = [AVCaptureDeviceInput deviceInputWithDevice:_audioInputDevice error:therror];
audio_capture_input = [AVCaptureDeviceInput deviceInputWithDevice:self.activeAudioDevice error:therror];
if (!audio_capture_input)
{
@@ -289,28 +316,11 @@
}
void PixelBufferRelease(void *releaseRefCon, const void *baseAddress)
{
if (baseAddress)
free((void *)baseAddress);
}
- (CVImageBufferRef) getCurrentFrame
{
//copy the current frame to a new pixel buffer
//If I don't copy the pixel buffers, sometimes they just generate exceptions, even if I retain them and lock them. Assuming
//the IOSurface is being reclaimed or something
//There may be a better way to do this?
CVImageBufferRef newbuf = NULL;
void *bufbytes;
void *current_base;
size_t width;
size_t height;
size_t bytesPerRow;
@synchronized(self)
{
@@ -318,6 +328,7 @@ void PixelBufferRelease(void *releaseRefCon, const void *baseAddress)
{
CVPixelBufferRetain(_currentFrame);
return _currentFrame;
/*
CVPixelBufferLockBaseAddress(_currentFrame, 1);
width = CVPixelBufferGetWidth(_currentFrame);
@@ -342,6 +353,11 @@ void PixelBufferRelease(void *releaseRefCon, const void *baseAddress)
}
- (BOOL)needsAdvancedVideo
{
return YES;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
@@ -349,17 +365,20 @@ void PixelBufferRelease(void *releaseRefCon, const void *baseAddress)
{
CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferRetain(videoFrame);
@synchronized(self)
if (videoFrame)
{
if (_currentFrame)
{
CVPixelBufferRelease(_currentFrame);
}
_currentFrame = videoFrame;
CVPixelBufferRetain(videoFrame);
[self.videoDelegate captureOutputVideo:nil didOutputSampleBuffer:nil didOutputImage:videoFrame frameTime:0 ];
/*
dispatch_sync(dispatch_get_main_queue(), ^{
[self.videoDelegate captureOutputVideo:nil didOutputSampleBuffer:nil didOutputImage:newbuf frameTime:0 ];});
*/
CVPixelBufferRelease(videoFrame);
}
} else if (connection.output == _audio_capture_output) {


@@ -0,0 +1,14 @@
//
// AVFrameRateRange+CocoaSplitAdditions.h
// CocoaSplit
//
// Created by Zakk on 1/19/13.
// Copyright (c) 2013 Zakk. All rights reserved.
//
#import <AVFoundation/AVFoundation.h>
@interface AVFrameRateRange (CocoaSplitAdditions)
@property (readonly) NSString *localizedName;
@end


@@ -0,0 +1,23 @@
//
// AVFrameRateRange+CocoaSplitAdditions.m
// CocoaSplit
//
// Created by Zakk on 1/19/13.
// Copyright (c) 2013 Zakk. All rights reserved.
//
#import "AVFrameRateRange+CocoaSplitAdditions.h"
@implementation AVFrameRateRange (CocoaSplitAdditions)
- (NSString *)localizedName
{
if ([self minFrameRate] != [self maxFrameRate]) {
NSString *formatString = NSLocalizedString(@"FPS: %0.2f-%0.2f", @"FPS when minFrameRate != maxFrameRate");
return [NSString stringWithFormat:formatString, [self minFrameRate], [self maxFrameRate]];
}
NSString *formatString = NSLocalizedString(@"FPS: %0.2f", @"FPS when minFrameRate == maxFrameRate");
return [NSString stringWithFormat:formatString, [self minFrameRate]];
}
@end
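A quick illustration of the strings this category produces (the frame rates here are hypothetical values): a fixed-rate range renders as "FPS: 29.97", a variable range as "FPS: 1.00-30.00", so the framerate selector can list AVFrameRateRange objects by name.

// Hypothetical caller; 'format' is any AVCaptureDeviceFormat.
for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges)
{
    NSLog(@"%@", range.localizedName); // e.g. "FPS: 29.97" or "FPS: 1.00-30.00"
}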


@@ -25,18 +25,19 @@ void VideoCompressorReceiveFrame(void *, void *, OSStatus , VTEncodeInfoFlags ,
@interface CaptureController : NSObject <CaptureDataReceiverDelegateProtocol> {
id _video_capture_session;
id _audio_capture_session;
id _active_video_capture_session;
VTCompressionSessionRef _compression_session;
NSTimer *_captureTimer;
long long _frameCount;
CFAbsoluteTime _firstFrameTime;
NSString *_selectedVideoType;
dispatch_queue_t _main_capture_queue;
}
@property (retain) id<CaptureSessionProtocol> videoCaptureSession;
@property (assign) double min_delay;
@property (assign) double max_delay;
@property (assign) double avg_delay;
@@ -97,7 +98,6 @@ void VideoCompressorReceiveFrame(void *, void *, OSStatus , VTEncodeInfoFlags ,
- (IBAction)removeDestination:(id)sender;
@property (weak) NSArray *videoCaptureDevices;
@property (weak) NSArray *audioCaptureDevices;
@property (strong) FFMpegTask *ffmpeg_obj;


@@ -14,7 +14,7 @@
#import "DesktopCapture.h"
#import "PreviewView.h"
#import <IOSurface/IOSurface.h>
#import "CaptureSessionProtocol.h"
@implementation CaptureController
@@ -29,11 +29,9 @@
NSMutableURLRequest *apiRequest = [NSMutableURLRequest requestWithURL:apiURL];
NSLog(@"SENDING ASYNC URL CONNECTION");
[NSURLConnection sendAsynchronousRequest:apiRequest queue:[NSOperationQueue mainQueue] completionHandler:^(NSURLResponse *response, NSData *data, NSError *err) {
NSError *jsonError;
NSLog(@"GOT RESPONSE FOR INGESTS");
NSDictionary *ingest_response = [NSJSONSerialization JSONObjectWithData:data options:NSJSONReadingMutableContainers error:&jsonError];
//Handle error
@@ -121,29 +119,34 @@
dummydev.uniqueID = uniqueID;
NSArray *currentAvailableDevices;
currentAvailableDevices = self.videoCaptureSession.availableVideoDevices;
NSUInteger sidx;
sidx = [self.videoCaptureDevices indexOfObject:dummydev];
sidx = [currentAvailableDevices indexOfObject:dummydev];
if (sidx == NSNotFound)
{
self.selectedVideoCapture = nil;
self.videoCaptureSession.activeVideoDevice = nil;
} else {
self.selectedVideoCapture = [self.videoCaptureDevices objectAtIndex:sidx];
self.videoCaptureSession.activeVideoDevice = [currentAvailableDevices objectAtIndex:sidx];
}
}
-(IBAction) videoRefresh:(id)sender
{
self.videoCaptureDevices = [_video_capture_session availableVideoDevices];
NSArray *currentAvailableDevices = self.videoCaptureSession.availableVideoDevices;
if (self.selectedVideoCapture)
{
NSUInteger sidx;
sidx = [self.videoCaptureDevices indexOfObject:self.selectedVideoCapture];
sidx = [currentAvailableDevices indexOfObject:self.selectedVideoCapture];
if (sidx == NSNotFound)
{
self.selectedVideoCapture = nil;
} else {
self.selectedVideoCapture = [self.videoCaptureDevices objectAtIndex:sidx];
self.selectedVideoCapture = [currentAvailableDevices objectAtIndex:sidx];
}
}
}
@@ -159,33 +162,34 @@
-(void) setSelectedVideoType:(NSString *)selectedVideoType
{
NSLog(@"SETTING SELECTED VIDEO TYPE");
if ([selectedVideoType isEqualToString:@"Desktop"])
{
_video_capture_session = [[DesktopCapture alloc ] init];
self.videoCaptureSession = [[DesktopCapture alloc ] init];
} else if ([selectedVideoType isEqualToString:@"AVFoundation"]) {
_video_capture_session = [[AVFCapture alloc] init];
self.videoCaptureSession = [[AVFCapture alloc] init];
} else if ([selectedVideoType isEqualToString:@"QTCapture"]) {
_video_capture_session = [[QTCapture alloc] init];
self.videoCaptureSession = [[QTCapture alloc] init];
} else if ([selectedVideoType isEqualToString:@"Syphon"]) {
_video_capture_session = [[SyphonCapture alloc] init];
self.videoCaptureSession = [[SyphonCapture alloc] init];
} else {
_video_capture_session = nil;
self.videoCaptureSession = [[AVFCapture alloc] init];
}
if (!_video_capture_session)
if (!self.videoCaptureSession)
{
_audio_capture_session = nil;
_selectedVideoType = nil;
}
if ([_video_capture_session providesAudio])
if ([self.videoCaptureSession providesAudio])
{
_audio_capture_session = _video_capture_session;
_audio_capture_session = self.videoCaptureSession;
} else {
_audio_capture_session = [[AVFCapture alloc] init];
}
self.videoCaptureDevices = [_video_capture_session availableVideoDevices];
self.audioCaptureDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
self.selectedVideoCapture = nil;
@@ -208,6 +212,7 @@
dispatch_source_set_event_handler(sigsrc, ^{ return;});
dispatch_resume(sigsrc);
_main_capture_queue = dispatch_queue_create("CocoaSplit.main.queue", NULL);
self.destinationTypes = @{@"file" : @"File/Raw",
@"twitch" : @"Twitch TV"};
@@ -228,7 +233,7 @@
NSFileManager *fileManager = [NSFileManager defaultManager];
NSString *saveFolder = @"~/Library/Application Support/H264Streamer";
NSString *saveFolder = @"~/Library/Application Support/CocoaSplit";
saveFolder = [saveFolder stringByExpandingTildeInPath];
@@ -237,7 +242,7 @@
[fileManager createDirectoryAtPath:saveFolder withIntermediateDirectories:NO attributes:nil error:nil];
}
NSString *saveFile = @"H264Streamer.settings";
NSString *saveFile = @"CocoaSplit.settings";
return [saveFolder stringByAppendingPathComponent:saveFile];
}
@@ -254,12 +259,12 @@
[saveRoot setValue: [NSNumber numberWithInt:self.captureWidth] forKey:@"captureWidth"];
[saveRoot setValue: [NSNumber numberWithInt:self.captureHeight] forKey:@"captureHeight"];
[saveRoot setValue: [NSNumber numberWithInt:self.captureFPS] forKey:@"captureFPS"];
[saveRoot setValue: [NSNumber numberWithInt:self.videoCaptureSession.videoCaptureFPS] forKey:@"captureFPS"];
[saveRoot setValue: [NSNumber numberWithInt:self.captureVideoAverageBitrate] forKey:@"captureVideoAverageBitrate"];
[saveRoot setValue: [NSNumber numberWithInt:self.audioBitrate] forKey:@"audioBitrate"];
[saveRoot setValue: [NSNumber numberWithInt:self.audioSamplerate] forKey:@"audioSamplerate"];
[saveRoot setValue: self.selectedVideoType forKey:@"selectedVideoType"];
[saveRoot setValue: self.selectedVideoCapture.uniqueID forKey:@"videoCaptureID"];
[saveRoot setValue: self.videoCaptureSession.activeVideoDevice.uniqueID forKey:@"videoCaptureID"];
[saveRoot setValue: self.selectedAudioCapture.uniqueID forKey:@"audioCaptureID"];
[saveRoot setValue: self.captureDestinations forKey:@"captureDestinations"];
[saveRoot setValue: [NSNumber numberWithInt:self.captureVideoMaxBitrate] forKey:@"captureVideoMaxBitrate"];
@@ -280,7 +285,6 @@
saveRoot = [NSKeyedUnarchiver unarchiveObjectWithFile:path];
self.captureWidth = [[saveRoot valueForKey:@"captureWidth"] intValue];
self.captureHeight = [[saveRoot valueForKey:@"captureHeight"] intValue];
self.captureFPS = [[saveRoot valueForKey:@"captureFPS"] intValue];
self.captureVideoAverageBitrate = [[saveRoot valueForKey:@"captureVideoAverageBitrate"] intValue];
self.captureVideoMaxBitrate = [[saveRoot valueForKey:@"captureVideoMaxBitrate"] intValue];
self.captureVideoMaxKeyframeInterval = [[saveRoot valueForKey:@"captureVideoMaxKeyframeInterval"] intValue];
@@ -303,6 +307,8 @@
NSString *audioID = [saveRoot valueForKey:@"audioCaptureID"];
[self selectedAudioCaptureFromID:audioID];
self.videoCaptureSession.videoCaptureFPS = [[saveRoot valueForKey:@"captureFPS"] intValue];
}
@@ -334,7 +340,14 @@
newDest = [[OutputDestination alloc] initWithType:_selectedDestinationType];
newDest.server_name = _streamingServiceServer;
newDest.stream_key = _streamingServiceKey;
newDest.destination = [_streamingDestination stringByReplacingOccurrencesOfString:@"{stream_key}" withString:_streamingServiceKey];
if (_streamingServiceKey)
{
newDest.destination = [_streamingDestination stringByReplacingOccurrencesOfString:@"{stream_key}" withString:_streamingServiceKey];
} else {
newDest.destination = _streamingDestination;
}
[[self mutableArrayValueForKey:@"captureDestinations"] addObject:newDest];
[self attachCaptureDestination:newDest];
@@ -350,7 +363,7 @@
newout = [[FFMpegTask alloc] init];
newout.height = _captureHeight;
newout.width = _captureWidth;
newout.framerate = _captureFPS;
newout.framerate = self.videoCaptureSession.videoCaptureFPS;
newout.stream_output = output.destination;
newout.stream_format = output.output_format;
newout.samplerate = _audioSamplerate;
@@ -380,16 +393,14 @@
[_audio_capture_session setActiveAudioDevice:_selectedAudioCapture];
[_video_capture_session setActiveVideoDevice:_selectedVideoCapture];
[_video_capture_session setVideoCaptureFPS:_captureFPS];
[_video_capture_session setVideoDelegate:self];
[_video_capture_session setVideoDimensions:_captureWidth height:_captureHeight];
[self.videoCaptureSession setVideoDelegate:self];
[self.videoCaptureSession setVideoDimensions:_captureWidth height:_captureHeight];
[_audio_capture_session setAudioDelegate:self];
[_audio_capture_session setAudioBitrate:_audioBitrate];
[_audio_capture_session setAudioSamplerate:_audioSamplerate];
success = [_video_capture_session setupCaptureSession:&error];
success = [self.videoCaptureSession setupCaptureSession:&error];
if (!success)
{
[NSApp presentError:error];
@@ -424,14 +435,14 @@
}
success = [_video_capture_session startCaptureSession:&error];
success = [self.videoCaptureSession startCaptureSession:&error];
_frameCount = 0;
_compressedFrameCount = 0;
_min_delay = _max_delay = _avg_delay = 0;
_captureTimer = [NSTimer timerWithTimeInterval:1.0/_captureFPS target:self selector:@selector(newFrame) userInfo:nil repeats:YES];
[[NSRunLoop currentRunLoop] addTimer:_captureTimer forMode:NSRunLoopCommonModes];
// _captureTimer = [NSTimer timerWithTimeInterval:1.0/_captureFPS target:self selector:@selector(newFrame) userInfo:nil repeats:YES];
//[[NSRunLoop currentRunLoop] addTimer:_captureTimer forMode:NSRunLoopCommonModes];
if (!success)
{
@@ -468,9 +479,9 @@
}
if (_video_capture_session)
if (self.videoCaptureSession)
{
[_video_capture_session stopCaptureSession];
[self.videoCaptureSession stopCaptureSession];
}
if (_audio_capture_session)
@@ -521,7 +532,7 @@
{
int real_bitrate = self.captureVideoMaxBitrate*128; // In bytes (1024/8)
VTSessionSetProperty(_compression_session, kVTCompressionPropertyKey_DataRateLimits, (__bridge CFTypeRef)(@[@(real_bitrate), @1]));
VTSessionSetProperty(_compression_session, kVTCompressionPropertyKey_DataRateLimits, (__bridge CFTypeRef)(@[@(real_bitrate), @1.0]));
}
@@ -536,10 +547,10 @@
}
if (_captureFPS && _captureFPS > 0)
if (self.videoCaptureSession.videoCaptureFPS && self.videoCaptureSession.videoCaptureFPS > 0)
{
VTSessionSetProperty(_compression_session, kVTCompressionPropertyKey_ExpectedFrameRate, CFNumberCreate(NULL, kCFNumberIntType, &_captureFPS));
VTSessionSetProperty(_compression_session, kVTCompressionPropertyKey_ExpectedFrameRate, (__bridge CFTypeRef)(@(self.videoCaptureSession.videoCaptureFPS)));
}
@@ -552,7 +563,7 @@
{
CVImageBufferRef cFrame;
cFrame = [_video_capture_session getCurrentFrame];
cFrame = [self.videoCaptureSession getCurrentFrame];
CFAbsoluteTime currentTime = CFAbsoluteTimeGetCurrent();
@@ -572,7 +583,7 @@
[self.previewCtx drawFrame:cFrame];
[self captureOutputVideo:_video_capture_session didOutputSampleBuffer:nil didOutputImage:cFrame frameTime:0 ];
[self captureOutputVideo:self.videoCaptureSession didOutputSampleBuffer:nil didOutputImage:cFrame frameTime:0 ];
}
@@ -610,26 +621,61 @@
- (void)captureOutputVideo:(AbstractCaptureDevice *)fromDevice didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer didOutputImage:(CVImageBufferRef)imageBuffer frameTime:(uint64_t)frameTime
{
if (imageBuffer)
{
CVPixelBufferRetain(imageBuffer);
dispatch_async(dispatch_get_main_queue(), ^{
[self processVideoFrame:imageBuffer];
}
);
}
}
-(void)processVideoFrame:(CVImageBufferRef)imageBuffer
{
CMTime pts;
CMTime duration;
if(!imageBuffer)
return;
CFAbsoluteTime currentTime = CFAbsoluteTimeGetCurrent();
pts = CMTimeMake(currentTime*1000, 1000);
duration = CMTimeMake(1, _captureFPS);
duration = CMTimeMake(1, self.videoCaptureSession.videoCaptureFPS);
if (_frameCount == 0)
{
_firstFrameTime = currentTime;
}
_frameCount++;
if ((_frameCount % 15) == 0)
{
[self updateStatusString];
}
[self.previewCtx drawFrame:imageBuffer];
if(!imageBuffer)
return;
VTCompressionSessionEncodeFrame(_compression_session, imageBuffer, pts, duration, NULL, imageBuffer, NULL);
CVPixelBufferRelease(imageBuffer); //VTCompression should retain it?
//CVPixelBufferRelease(imageBuffer);
}
void VideoCompressorReceiveFrame(void *VTref, void *VTFrameRef, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer)
{
if (VTFrameRef)
{
CVPixelBufferRelease(VTFrameRef);
}
@autoreleasepool {
if(!sampleBuffer)
return;
@@ -649,7 +695,7 @@ void VideoCompressorReceiveFrame(void *VTref, void *VTFrameRef, OSStatus status,
selfobj.max_delay = frame_delay;
}
//selfobj.compressedFrameCount++;
selfobj.compressedFrameCount++;
CFRetain(sampleBuffer);


@@ -15,15 +15,22 @@
@protocol CaptureSessionProtocol <NSObject>
@required
-(NSArray *) availableVideoDevices;
@property int videoCaptureFPS;
@property int width;
@property int height;
@property AbstractCaptureDevice *activeVideoDevice;
@property id videoDelegate;
@property (readonly) NSArray *availableVideoDevices;
@property (readonly) BOOL needsAdvancedVideo;
-(bool) stopCaptureSession;
-(bool) startCaptureSession:(NSError **)error;
-(bool) setActiveVideoDevice:(id)videoDevice;
-(bool) providesVideo;
-(bool) providesAudio;
-(void) setVideoDelegate:(id)delegate;
-(bool) setupCaptureSession:(NSError **)therror;
-(void) setVideoCaptureFPS:(int)fps;
-(void) setVideoDimensions:(int)width height:(int)height;
@@ -38,9 +45,14 @@
@optional
@property id activeAudioDevice;
-(void) setAudioDelegate:(id)delegate;
-(bool) setActiveAudioDevice:(id)audioDevice;
-(NSArray *) availableAudioDevices;
@property NSArray *videoFormats;
@property NSArray *videoFramerates;
@property id activeVideoFormat;
@property id activeVideoFramerate;
@end


@@ -13,27 +13,30 @@
@interface DesktopCapture : NSObject <CaptureSessionProtocol>
{
int _width;
int _height;
dispatch_queue_t _capture_queue;
CGDisplayStreamRef _displayStreamRef;
IOSurfaceRef _currentFrame;
uint64_t _currentFrameTime;
CGDirectDisplayID _activeVideoDevice;
CGDirectDisplayID _currentDisplay;
}
-(bool)providesAudio;
-(bool)providesVideo;
-(NSArray *)availableVideoDevices;
-(void) setVideoDimensions:(int)width height:(int)height;
@property (strong) id videoDelegate;
@property (assign) int videoCaptureFPS;
@property int videoCaptureFPS;
@property int width;
@property int height;
@property AbstractCaptureDevice *activeVideoDevice;
@property id videoDelegate;
@property (readonly) NSArray *availableVideoDevices;
@property (readonly) BOOL needsAdvancedVideo;


@@ -14,32 +14,46 @@
@implementation DesktopCapture
@synthesize activeVideoDevice = _activeVideoDevice;
-(bool) setActiveVideoDevice:(AbstractCaptureDevice *)newDev
-(BOOL) needsAdvancedVideo
{
_activeVideoDevice = [[newDev captureDevice] unsignedIntValue];
return YES;
return NO;
}
-(AbstractCaptureDevice *)activeVideoDevice
{
return _activeVideoDevice;
}
-(void) setActiveVideoDevice:(AbstractCaptureDevice *)newDev
{
_activeVideoDevice = newDev;
_currentDisplay = [[newDev captureDevice] unsignedIntValue];
}
-(void) setVideoDimensions:(int)width height:(int)height
{
_width = width;
_height = height;
self.width = width;
self.height = height;
}
-(bool)setupCaptureSession:(NSError *__autoreleasing *)therror
{
if (!_activeVideoDevice)
if (!self.activeVideoDevice)
{
*therror = [NSError errorWithDomain:@"videoCapture" code:100 userInfo:@{NSLocalizedDescriptionKey : @"Must select video capture device first"}];
return NO;
}
if (!(_width > 0) || !(_height > 0))
if (!(self.width > 0) || !(self.height > 0))
{
*therror = [NSError errorWithDomain:@"videoCapture" code:150 userInfo:@{NSLocalizedDescriptionKey : @"Width and height must be set to greater than zero"}];
return NO;
@@ -54,28 +68,28 @@
}
_currentFrameTime = 0;
NSNumber *minframetime = [NSNumber numberWithFloat:1/self.videoCaptureFPS];
if (!self.videoCaptureFPS || self.videoCaptureFPS == 0)
{
self.videoCaptureFPS = 60;
}
_displayStreamRef = CGDisplayStreamCreateWithDispatchQueue(_activeVideoDevice, _width, _height, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, (__bridge CFDictionaryRef)(@{(NSString *)kCGDisplayStreamQueueDepth : @20, (NSString *)kCGDisplayStreamMinimumFrameTime : minframetime, (NSString *)kCGDisplayStreamPreserveAspectRatio: @NO}), _capture_queue, ^(CGDisplayStreamFrameStatus status, uint64_t displayTime, IOSurfaceRef frameSurface, CGDisplayStreamUpdateRef updateRef) {
NSNumber *minframetime = [NSNumber numberWithFloat:1.0/self.videoCaptureFPS];
_displayStreamRef = CGDisplayStreamCreateWithDispatchQueue(_currentDisplay, self.width, self.height, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, (__bridge CFDictionaryRef)(@{(NSString *)kCGDisplayStreamQueueDepth : @20, (NSString *)kCGDisplayStreamMinimumFrameTime : minframetime, (NSString *)kCGDisplayStreamPreserveAspectRatio: @NO}), _capture_queue, ^(CGDisplayStreamFrameStatus status, uint64_t displayTime, IOSurfaceRef frameSurface, CGDisplayStreamUpdateRef updateRef) {
if (frameSurface)
{
CFRetain(frameSurface);
IOSurfaceIncrementUseCount(frameSurface);
@synchronized(self) {
if (_currentFrame)
{
IOSurfaceDecrementUseCount(_currentFrame);
CFRelease(_currentFrame);
}
CVPixelBufferRef tmpbuf;
_currentFrame = frameSurface;
_currentFrameTime = displayTime;
//IOSurfaceIncrementUseCount(_currentFrame);
//CFRetain(_currentFrame);
if (self.videoDelegate)
{
CVPixelBufferCreateWithIOSurface(NULL, frameSurface, NULL, &tmpbuf);
if (tmpbuf)
{
[self.videoDelegate captureOutputVideo:nil didOutputSampleBuffer:nil didOutputImage:tmpbuf frameTime:0 ];
CVPixelBufferRelease(tmpbuf);
}
}
}
});
@@ -98,45 +112,6 @@
return YES;
}
void DesktopPixelBufferRelease(void *releaseRefCon, const void *baseAddress)
{
if (baseAddress)
free((void *)baseAddress);
}
- (CVImageBufferRef) getCurrentFrame
{
CVImageBufferRef newbuf = NULL;
@synchronized(self)
{
if (_currentFrame)
{
CVPixelBufferRef tmpbuf;
CVPixelBufferCreateWithIOSurface(NULL, _currentFrame, NULL, &tmpbuf);
return tmpbuf;
}
}
return newbuf;
}
-(bool)providesAudio
{
return NO;


@@ -12,10 +12,11 @@
@interface PreviewView : NSOpenGLView
{
IOSurfaceRef _boundIOSurface;
IOSurfaceID _boundIOSurfaceID;
GLuint _previewTexture;
GLsizei _surfaceWidth;
GLsizei _surfaceHeight;
int _hackcnt;
}


@@ -34,6 +34,7 @@
glEnable(GL_TEXTURE_RECTANGLE_ARB);
glGenTextures(1, &_previewTexture);
glDisable(GL_TEXTURE_RECTANGLE_ARB);
NSLog(@"SETUP PREVIEW TEXTURE");
}
return self;
@@ -45,22 +46,32 @@
- (void) drawFrame:(CVImageBufferRef)cImageBuf
{
IOSurfaceRef cFrame = CVPixelBufferGetIOSurface(cImageBuf);
if (cFrame && (_boundIOSurface != cFrame))
if (!cImageBuf)
{
_boundIOSurface = cFrame;
return;
}
CVPixelBufferRetain(cImageBuf);
IOSurfaceRef cFrame = CVPixelBufferGetIOSurface(cImageBuf);
IOSurfaceID cFrameID;
if (cFrame)
{
cFrameID = IOSurfaceGetID(cFrame);
}
if (cFrame && (_boundIOSurfaceID != cFrameID))
{
_boundIOSurfaceID = cFrameID;
CGLContextObj cgl_ctx = [[self openGLContext] CGLContextObj];
_surfaceHeight = (GLsizei)IOSurfaceGetHeight(_boundIOSurface);
_surfaceWidth = (GLsizei)IOSurfaceGetWidth(_boundIOSurface);
_surfaceHeight = (GLsizei)IOSurfaceGetHeight(cFrame);
_surfaceWidth = (GLsizei)IOSurfaceGetWidth(cFrame);
/* the only formats we specify in any of the capture modules are: 420v, 420f and 2vuy. We can't handle 420* without some fragment shader /multi texture trickery, so just grab the first luminance plane and display that for now */
GLenum gl_internal_format;
GLenum gl_format;
GLenum gl_type;
OSType frame_pixel_format = IOSurfaceGetPixelFormat(cFrame);
if (frame_pixel_format == kCVPixelFormatType_422YpCbCr8)
{
gl_format = GL_YCBCR_422_APPLE;
@@ -74,12 +85,13 @@
glEnable(GL_TEXTURE_RECTANGLE_ARB);
glBindTexture(GL_TEXTURE_RECTANGLE_ARB, _previewTexture);
CGLTexImageIOSurface2D(cgl_ctx, GL_TEXTURE_RECTANGLE_ARB, gl_internal_format, _surfaceWidth, _surfaceHeight, gl_format, gl_type, _boundIOSurface, 0);
CGLTexImageIOSurface2D(cgl_ctx, GL_TEXTURE_RECTANGLE_ARB, gl_internal_format, _surfaceWidth, _surfaceHeight, gl_format, gl_type, cFrame, 0);
glBindTexture(GL_TEXTURE_RECTANGLE_ARB, 0);
glDisable(GL_TEXTURE_RECTANGLE_ARB);
[self drawRect:CGRectZero];
}
CVPixelBufferRelease(cImageBuf);
}
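The switch above from comparing IOSurfaceRef pointers to comparing IOSurfaceIDs is presumably because the old _boundIOSurface was held without a retain: once a surface is recycled, a different frame can arrive on a surface at the same address, so pointer equality can falsely match and skip the texture rebind, while the integer ID avoids keeping a dangling pointer at all. A minimal sketch of the rebind test (names mirror the ivars added in PreviewView.h above):

// Hedged sketch of the check performed in drawFrame:.
IOSurfaceRef cFrame = CVPixelBufferGetIOSurface(cImageBuf);
if (cFrame && IOSurfaceGetID(cFrame) != _boundIOSurfaceID)
{
    _boundIOSurfaceID = IOSurfaceGetID(cFrame);
    // ...rebind via CGLTexImageIOSurface2D(cgl_ctx, GL_TEXTURE_RECTANGLE_ARB, ...)...
}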
@@ -101,8 +113,8 @@
glPushMatrix();
glLoadIdentity();
if (_boundIOSurface)
{
//if (_boundIOSurface)
//{
glEnable(GL_TEXTURE_RECTANGLE_ARB);
glBindTexture(GL_TEXTURE_RECTANGLE_ARB, _previewTexture);
glTexParameteri(GL_TEXTURE_RECTANGLE_ARB, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
@@ -160,7 +172,7 @@
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
glBindTexture(GL_TEXTURE_RECTANGLE_ARB, 0);
}
//}
/* if (_boundIOSurface)
{
GLfloat texMatrix[16] = {0};


@@ -22,13 +22,21 @@
}
@property (strong) id videoInputDevice;
@property (strong) id videoDelegate;
@property (assign) int videoCaptureFPS;
@property int videoCaptureFPS;
@property int width;
@property int height;
@property NSString *activeVideoDevice;
@property id videoDelegate;
@property (readonly) NSArray *availableVideoDevices;
@property (readonly) BOOL needsAdvancedVideo;
-(bool) startCaptureSession:(NSError **)error;
-(bool) stopCaptureSession;
-(void) setVideoDimensions:(int)width height:(int)height;
-(bool) setupCaptureSession:(NSError **)therror;


@@ -13,6 +13,9 @@
@implementation QTCapture
@synthesize activeVideoDevice = _activeVideoDevice;
@synthesize availableVideoDevices = _availableVideoDevices;
-(id) init
{
@@ -32,6 +35,7 @@
[_xpcConnection resume];
_xpcProxy = [_xpcConnection remoteObjectProxy];
NSLog(@"GOT PROXY OBJECT");
[self updateAvailableVideoDevices];
}
@@ -59,20 +63,23 @@
-(bool) setActiveVideoDevice:(AbstractCaptureDevice *)newDev
-(NSString *) activeVideoDevice
{
NSLog(@"SET VIDEO DEVICE TO %@", [newDev uniqueID]);
_videoInputDevice = [newDev uniqueID];
return YES;
return _activeVideoDevice;
}
-(NSArray *) availableVideoDevices
-(void) setActiveVideoDevice:(AbstractCaptureDevice *)newDev
{
_activeVideoDevice = [newDev uniqueID];
}
-(void) updateAvailableVideoDevices
{
dispatch_semaphore_t reply_s = dispatch_semaphore_create(0);
NSMutableArray *__block retArray;
NSLog(@"PROXY %@", _xpcProxy);
@@ -88,38 +95,39 @@
{
[retArray addObject:[[AbstractCaptureDevice alloc] initWithName:[devinstance valueForKey:@"name"] device:[devinstance valueForKey:@"id"] uniqueID:[devinstance valueForKey:@"id"]]];
}
dispatch_semaphore_signal(reply_s);
[self willChangeValueForKey:@"availableVideoDevices"];
_availableVideoDevices = (NSArray *)retArray;
[self didChangeValueForKey:@"availableVideoDevices"];
//dispatch_semaphore_signal(reply_s);
}];
/*
NSLog(@"SEMAPHORE WAIT");
dispatch_semaphore_wait(reply_s, DISPATCH_TIME_FOREVER);
NSLog(@"NO LONGER WAITING ON SEMAPHORE");
reply_s = nil;
return (NSArray *)retArray;
*/
}
-(void) newCapturedFrame:(IOSurfaceID)ioxpc reply:(void (^)())reply
{
IOSurfaceRef frameIOref = IOSurfaceLookup(ioxpc);
if (frameIOref)
{
@synchronized(self) {
if (_currentFrame)
{
IOSurfaceDecrementUseCount(_currentFrame);
//CFRelease(_currentFrame);
}
_currentFrame = frameIOref;
IOSurfaceIncrementUseCount(_currentFrame);
//CFRetain(_currentFrame);
}
}
// ALWAYS reply
CVPixelBufferRef tmpbuf;
if (self.videoDelegate && frameIOref)
{
CVPixelBufferCreateWithIOSurface(NULL, frameIOref, NULL, &tmpbuf);
if (tmpbuf)
{
[self.videoDelegate captureOutputVideo:nil didOutputSampleBuffer:nil didOutputImage:tmpbuf frameTime:0 ];
CVPixelBufferRelease(tmpbuf);
}
}
//ALWAYS REPLY
reply();
}
@@ -136,8 +144,8 @@
-(bool) startCaptureSession:(NSError **)error
{
NSLog(@"CALLING STARTXPC WITH %@", _videoInputDevice);
[_xpcProxy startXPCCaptureSession:_videoInputDevice];
NSLog(@"CALLING STARTXPC WITH %@", self.activeVideoDevice);
[_xpcProxy startXPCCaptureSession:self.activeVideoDevice];
return YES;
}

File diff suppressed because it is too large.