Let VTCompressionSession do the pixel transfer so it happens on a background thread instead of during the main render/recorder loop

Properly flag keyFrames from VTCompressionSession
This commit is contained in:
Zakk 2019-11-03 06:25:10 -05:00
parent 9021929c56
commit 07b26b99ea
3 changed files with 87 additions and 55 deletions

View file

@@ -393,6 +393,7 @@
{
double startTime;
double start_t, end_t, elapsed_t;
startTime = [[CaptureController sharedCaptureController] mach_time_seconds];
@@ -405,65 +406,80 @@
{
@autoreleasepool {
if (self.layout.layoutTimingSource && self.layout.layoutTimingSource.videoInput && self.layout.layoutTimingSource.videoInput.canProvideTiming)
{
CSCaptureBase *newTiming = (CSCaptureBase *)self.layout.layoutTimingSource.videoInput;
newTiming.timerDelegateCtx = nil;
newTiming.timerDelegate = self;
return;
}
//_frame_time = nowTime;//startTime;
if (![[CaptureController sharedCaptureController] sleepUntil:(startTime += 1.0/self.layout.frameRate)])
{
//NSLog(@"MISSED FRAME!");
continue;
}
int drain_cnt = 0;
if (!self.recordingActive)
{
for (OutputDestination *outdest in self.outputs)
{
if (outdest.buffer_draining)
{
drain_cnt++;
}
[outdest writeEncodedData:nil];
}
if (!drain_cnt)
if (self.layout.layoutTimingSource && self.layout.layoutTimingSource.videoInput && self.layout.layoutTimingSource.videoInput.canProvideTiming)
{
CSCaptureBase *newTiming = (CSCaptureBase *)self.layout.layoutTimingSource.videoInput;
newTiming.timerDelegateCtx = nil;
newTiming.timerDelegate = self;
return;
}
}
_frame_time = startTime;
//_frame_time = nowTime;//startTime;
startTime += 1.0/self.layout.frameRate;
double start_time = [CaptureController.sharedCaptureController mach_time_seconds];
if (![[CaptureController sharedCaptureController] sleepUntil:startTime])
{
NSLog(@"SLEEP UNTIL %f CURRENT TIME %f LAYOUT %@ %@", startTime, [[CaptureController sharedCaptureController] mach_time_seconds], self.layout.name, NSThread.currentThread);
continue;
}
//NSLog(@"SLEPT %f %f", end_time - start_time, self.layout.frameRate);
int drain_cnt = 0;
if (!self.recordingActive)
{
for (OutputDestination *outdest in self.outputs)
{
if (outdest.buffer_draining)
{
drain_cnt++;
}
[outdest writeEncodedData:nil];
}
if (!drain_cnt)
{
return;
}
}
_frame_time = startTime;
start_t = [CaptureController.sharedCaptureController mach_time_seconds];
[self newFrame];
}
end_t = [CaptureController.sharedCaptureController mach_time_seconds];
elapsed_t = end_t - start_t;
if (elapsed_t > 1.0/self.layout.frameRate)
{
NSLog(@"NEW FRAME TOOK %f", elapsed_t);
}
}
}
}
-(void) newFrame
{
double start_t,end_t, elapsed_t;
CVPixelBufferRef newFrame;
//double nfstart = [self mach_time_seconds];
start_t = [CaptureController.sharedCaptureController mach_time_seconds];
newFrame = [self.renderer currentImg];
end_t = [CaptureController.sharedCaptureController mach_time_seconds];
elapsed_t = end_t - start_t;
if (elapsed_t > 1.0f/60.0f)
{
NSLog(@"RENDER TOOK %f", elapsed_t);
}
if (self.frameReadyBlock)
{
self.frameReadyBlock();
@@ -486,7 +502,8 @@
}
*/
start_t = [CaptureController.sharedCaptureController mach_time_seconds];
if (newFrame && self.compressors && self.compressors.count > 0)
{
_frameCount++;
@@ -522,14 +539,19 @@
id <VideoCompressor> compressor;
compressor = useCompressors[cKey];
CapturedFrameData *newFrameData = newData.copy;
[compressor compressFrame:newFrameData];
if ([compressor hasOutputs])
{
used_compressor_count++;
}
}
end_t = [CaptureController.sharedCaptureController mach_time_seconds];
elapsed_t = end_t - start_t;
//if (elapsed_t > 1.0f/60.0f)
{
//NSLog(@"COMPRESSOR STUFF TOOK %f %@", elapsed_t, self.layout.name);
}
CVPixelBufferRelease(newFrame);
if (used_compressor_count == 0)

View file

@@ -3203,7 +3203,8 @@ NSString *const CSAppearanceSystem = @"CSAppearanceSystem";
{
//[self.mainLayoutRecorder startRecording];
self.mainRecordingActive = YES;
for (OutputDestination *outdest in _captureDestinations)
NSArray *destsCopy = _captureDestinations.copy;
for (OutputDestination *outdest in destsCopy)
{
if (!outdest.isRecorder || outdest.captureRunning)

View file

@@ -166,10 +166,12 @@ void PixelBufferRelease( void *releaseRefCon, const void *baseAddress )
frameProperties = NULL;
//}
/*
if (!_vtpt_ref)
{
VTPixelTransferSessionCreate(kCFAllocatorDefault, &_vtpt_ref);
VTSessionSetProperty(_vtpt_ref, kVTPixelTransferPropertyKey_ScalingMode, kVTScalingMode_Letterbox);
VTSessionSetProperty(_vtpt_ref, kVTPixelTransferPropertyKey_RealTime, kCFBooleanTrue);
}
CVPixelBufferRef converted_frame;
@@ -182,16 +184,17 @@ void PixelBufferRelease( void *releaseRefCon, const void *baseAddress )
//CVPixelBufferCreate(kCFAllocatorDefault, self.working_width, self.working_height, kCVPixelFormatType_422YpCbCr8, 0, &converted_frame);
VTPixelTransferSessionTransferImage(_vtpt_ref, imageBuffer, converted_frame);
*/
//set it to nil since this is our private copy and this will force the frameData instance to release the video data
frameData.videoFrame = nil;
frameData.encoderData = converted_frame;
//frameData.videoFrame = nil;
//frameData.encoderData = converted_frame;
CVPixelBufferRelease(imageBuffer);
//CVPixelBufferRelease(imageBuffer);
VTCompressionSessionEncodeFrame(_compression_session, converted_frame, frameData.videoPTS, frameData.videoDuration, frameProperties, (__bridge_retained void *)(frameData), NULL);
VTCompressionSessionEncodeFrame(_compression_session, frameData.videoFrame, frameData.videoPTS, frameData.videoDuration, frameProperties, (__bridge_retained void *)(frameData), NULL);
if (frameProperties)
{
@@ -248,6 +251,12 @@ void PixelBufferRelease( void *releaseRefCon, const void *baseAddress )
VTSessionSetProperty(_compression_session, kVTCompressionPropertyKey_ExpectedFrameRate, (__bridge CFTypeRef)(@(captureFPS)));
}
CFMutableDictionaryRef transferProps = CFDictionaryCreateMutable(kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
CFDictionarySetValue(transferProps, kVTPixelTransferPropertyKey_ScalingMode, kVTScalingMode_Letterbox);
VTSessionSetProperty(_compression_session, kVTCompressionPropertyKey_PixelTransferProperties, transferProps);
CFRelease(transferProps);
[self configureCompressionSession:_compression_session];
@@ -298,7 +307,8 @@ void VideoCompressorReceiveFrame(void *VTref, void *VTFrameRef, OSStatus status,
}
CVPixelBufferRelease(frameData.encoderData);
//CVPixelBufferRelease(frameData.videoFrame);
frameData.videoFrame = nil;
//frameData.videoFrame = nil;
@@ -312,8 +322,7 @@ void VideoCompressorReceiveFrame(void *VTref, void *VTFrameRef, OSStatus status,
attach = CFArrayGetValueAtIndex(sample_attachments, 0);
depends_on_others = CFDictionaryGetValue(attach, kCMSampleAttachmentKey_DependsOnOthers);
frameData.isKeyFrame = CFBooleanGetValue(depends_on_others);
frameData.isKeyFrame = !CFBooleanGetValue(depends_on_others);
}