Fixed a bug where changing an output’s compressor wouldn’t deregister it from the old compressor, causing two streams to be fed into one output (which is obviously broken). The new attach/detach logic is sketched below.

Send each compressor its own instance of CapturedFrameData, since sharing one across compressors is…dumb. Maybe I should have realized this earlier… The per-compressor copy is also sketched below.
Zakk 2014-07-11 04:45:15 -04:00
parent c89c38f021
commit 8808ee08cf
15 changed files with 238 additions and 166 deletions
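
A rough sketch of the first fix, distilled from the OutputDestination hunk further down (the enclosing method isn’t named in this view, so placement is illustrative): remember the previously attached compressor, attach to the new one, then detach from the old one only if it actually changed.

// Sketch, not the verbatim commit: remember the old compressor before
// re-resolving it from the settings controller.
NSObject <h264Compressor> *old_compressor = self.compressor;
if (self.compressor_name)
{
    self.compressor = self.settingsController.compressors[self.compressor_name];
}
if (self.compressor)
{
    // Register this output with the (possibly new) compressor.
    [self.compressor addOutput:self];
    [self.compressor addObserver:self forKeyPath:@"errored" options:NSKeyValueObservingOptionNew context:NULL];
}
if (old_compressor && (self.compressor != old_compressor))
{
    // The compressor actually changed: deregister from the old one so it
    // stops feeding this output a second encoded stream.
    [old_compressor removeOutput:self];
    [old_compressor removeObserver:self forKeyPath:@"errored"];
}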
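
Likewise, a sketch of the per-compressor CapturedFrameData change, based on the new processVideoFrame: loop in the hunks below — each compressor gets a fresh instance per frame, so one compressor swapping in its converted or encoded buffers can’t trample another’s.

// Sketch only; pts, duration, _frameCount, _frame_time and videoFrame
// come from the capture path shown later in this diff.
for (id cKey in self.compressors)
{
    CapturedFrameData *newFrameData = [[CapturedFrameData alloc] init];
    newFrameData.videoPTS = pts;
    newFrameData.videoDuration = duration;
    newFrameData.frameNumber = _frameCount;
    newFrameData.frameTime = _frame_time;
    newFrameData.videoFrame = videoFrame; // the property setter retains the buffer

    id <h264Compressor> compressor = self.compressors[cKey];
    [compressor compressFrame:newFrameData];
}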

View file

@@ -19,6 +19,8 @@
{
VTCompressionSessionRef _compression_session;
VTPixelTransferSessionRef _vtpt_ref;
}
@@ -34,7 +36,6 @@
-(bool)compressFrame:(CapturedFrameData *)frameData;
-(BOOL) setupResolution:(CVImageBufferRef)withFrame;
@end

View file

@@ -87,6 +87,7 @@ OSStatus VTCompressionSessionCopySupportedPropertyDictionary(VTCompressionSessio
-(void) reset
{
self.errored = NO;
VTCompressionSessionInvalidate(_compression_session);
if (_compression_session)
@@ -97,6 +98,8 @@ OSStatus VTCompressionSessionCopySupportedPropertyDictionary(VTCompressionSessio
_compression_session = nil;
}
- (void) dealloc
{
[self reset];
@@ -132,7 +135,6 @@ void PixelBufferRelease( void *releaseRefCon, const void *baseAddress )
if (![self setupCompressor:frameData.videoFrame])
{
//CVPixelBufferRelease(imageBuffer);
return NO;
}
return NO;
@@ -140,7 +142,6 @@ void PixelBufferRelease( void *releaseRefCon, const void *baseAddress )
CFMutableDictionaryRef frameProperties;
/*
@@ -154,8 +155,31 @@ void PixelBufferRelease( void *releaseRefCon, const void *baseAddress )
frameProperties = NULL;
//}
if (!_vtpt_ref)
{
VTPixelTransferSessionCreate(kCFAllocatorDefault, &_vtpt_ref);
VTSessionSetProperty(_vtpt_ref, kVTPixelTransferPropertyKey_ScalingMode, kVTScalingMode_Letterbox);
}
CVPixelBufferRef converted_frame;
VTCompressionSessionEncodeFrame(_compression_session, frameData.videoFrame, frameData.videoPTS, frameData.videoDuration, frameProperties, (__bridge_retained void *)(frameData), NULL);
CVImageBufferRef imageBuffer = frameData.videoFrame;
CVPixelBufferRetain(imageBuffer);
CVPixelBufferCreate(kCFAllocatorDefault, self.width, self.height, kCVPixelFormatType_420YpCbCr8Planar, 0, &converted_frame);
VTPixelTransferSessionTransferImage(_vtpt_ref, imageBuffer, converted_frame);
//set it to nil since this is our private copy and this will force the frameData instance to release the video data
frameData.videoFrame = nil;
frameData.encoderData = converted_frame;
CVPixelBufferRelease(imageBuffer);
[self setAudioData:frameData syncObj:self];
VTCompressionSessionEncodeFrame(_compression_session, converted_frame, frameData.videoPTS, frameData.videoDuration, frameProperties, (__bridge_retained void *)(frameData), NULL);
if (frameProperties)
{
@@ -198,6 +222,7 @@ void PixelBufferRelease( void *releaseRefCon, const void *baseAddress )
if (status != noErr || !_compression_session)
{
NSLog(@"COMPRESSOR SETUP ERROR");
self.errored = YES;
return NO;
}
@@ -209,12 +234,12 @@ void PixelBufferRelease( void *releaseRefCon, const void *baseAddress )
//VTSessionSetProperty(_compression_session, (CFStringRef)@"Priority", (__bridge CFTypeRef)(@-20));
/*
NSDictionary *transferSpec = @{
(__bridge NSString *)kVTPixelTransferPropertyKey_ScalingMode: (__bridge NSString *)kVTScalingMode_Letterbox,
};
VTSessionSetProperty(_compression_session, kVTCompressionPropertyKey_PixelTransferProperties, (__bridge CFTypeRef)(transferSpec));
*/
VTSessionSetProperty(_compression_session, kVTCompressionPropertyKey_AllowFrameReordering, kCFBooleanFalse);
VTSessionSetProperty(_compression_session, (__bridge CFStringRef)@"RealTime", kCFBooleanTrue);
@@ -331,6 +356,7 @@ void PixelBufferRelease( void *releaseRefCon, const void *baseAddress )
}
_audioBuffer = [[NSMutableArray alloc] init];
return YES;
}
@@ -347,7 +373,7 @@ void VideoCompressorReceiveFrame(void *VTref, void *VTFrameRef, OSStatus status,
*/
@autoreleasepool {
//@autoreleasepool {
@@ -360,6 +386,7 @@ void VideoCompressorReceiveFrame(void *VTref, void *VTFrameRef, OSStatus status,
CapturedFrameData *frameData;
frameData = (__bridge_transfer CapturedFrameData *)(VTFrameRef);
@@ -370,31 +397,31 @@ void VideoCompressorReceiveFrame(void *VTref, void *VTFrameRef, OSStatus status,
}
/* We don't need the original video frame anymore, set the property to nil, which will release the CVImageBufferRef */
CVPixelBufferRelease(frameData.encoderData);
frameData.videoFrame = nil;
//frameData.videoFrame = nil;
frameData.encodedSampleBuffer = sampleBuffer;
AppleVTCompressor *selfobj = (__bridge AppleVTCompressor *)VTref;
for (id dKey in selfobj.outputs)
{
OutputDestination *dest = selfobj.outputs[dKey];
[dest writeEncodedData:frameData];
}
//[selfobj.outputDelegate outputEncodedData:frameData];
//[selfobj.outputDelegate outputSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}
//}
}

View file

@@ -241,8 +241,6 @@ void VideoCompressorReceiveFrame(void *, void *, OSStatus , VTEncodeInfoFlags ,
- (void) outputSampleBuffer:(CMSampleBufferRef)theBuffer;
- (void) outputAVPacket:(AVPacket *)avpkt codec_ctx:(AVCodecContext *)codec_ctx;
- (void)saveSettings;
- (void)loadSettings;
- (bool) startStream;

View file

@@ -1082,29 +1082,6 @@
}
- (void) outputAVPacket:(AVPacket *)avpkt codec_ctx:(AVCodecContext *)codec_ctx
{
for (OutputDestination *outdest in _captureDestinations)
{
if (outdest.active)
{
id ffmpeg = outdest.ffmpeg_out;
[ffmpeg writeAVPacket:avpkt codec_ctx:codec_ctx];
}
}
}
- (void) outputSampleBuffer:(CMSampleBufferRef)theBuffer
{
for (OutputDestination *outdest in _captureDestinations)
{
if (outdest.active)
{
id ffmpeg = outdest.ffmpeg_out;
[ffmpeg writeVideoSampleBuffer:theBuffer];
}
}
}
-(bool) setupCompressors
@@ -1211,6 +1188,7 @@
}
for (OutputDestination *outdest in _captureDestinations)
{
[outdest reset];
@@ -1418,11 +1396,6 @@
self.captureRunning = NO;
for (OutputDestination *out in _captureDestinations)
{
[out stopOutput];
}
for (id cKey in self.compressors)
{
id <h264Compressor> ctmp = self.compressors[cKey];
@@ -1431,6 +1404,12 @@
[ctmp reset];
}
}
for (OutputDestination *out in _captureDestinations)
{
[out stopOutput];
}
if (floor(NSAppKitVersionNumber) <= NSAppKitVersionNumber10_8)
@@ -1445,13 +1424,7 @@
[[NSProcessInfo processInfo] endActivity:_activity_token];
}
[self.audioCaptureSession stopAudioCompression];
@synchronized(self)
{
audioBuffer = [[NSMutableArray alloc] init];
}
}
- (IBAction)streamButtonPushed:(id)sender {
@@ -1512,7 +1485,7 @@
if (CMTIME_COMPARE_INLINE(_firstAudioTime, ==, kCMTimeZero))
{
NSLog(@"SETTING FIRST AUDIO TIME");
_firstAudioTime = orig_pts;
return;
}
@@ -1522,16 +1495,21 @@
CMTime pts = CMTimeAdd(real_pts, adjust_pts);
//NSLog(@"AUDIO PTS %@", CMTimeCopyDescription(kCFAllocatorDefault, pts));
CMSampleBufferSetOutputPresentationTimeStamp(sampleBuffer, pts);
if (audioBuffer)
for(id cKey in self.compressors)
{
@synchronized(self)
{
[audioBuffer addObject:(__bridge id)sampleBuffer];
}
id <h264Compressor> compressor;
compressor = self.compressors[cKey];
[compressor addAudioData:sampleBuffer];
}
CFRelease(sampleBuffer);
}
@@ -1621,21 +1599,6 @@
}
-(NSMutableArray *)swapAudioBuffer
{
NSMutableArray *newBuf;
newBuf = [[NSMutableArray alloc] init];
NSMutableArray *retBuf;
retBuf = audioBuffer;
audioBuffer = newBuf;
return retBuf;
}
-(void) newFrameDispatched
@@ -1712,11 +1675,8 @@
[self setupCompressors];
}
CapturedFrameData *capturedData = [[CapturedFrameData alloc] init];
capturedData.videoFrame = newFrame;
[self processVideoFrame:capturedData];
[self processVideoFrame:newFrame];
} else {
@@ -1737,7 +1697,7 @@
-(void)processVideoFrame:(CapturedFrameData *)frameData
-(void)processVideoFrame:(CVPixelBufferRef)videoFrame
{
@@ -1753,12 +1713,9 @@
CMTime duration;
//compressor should have a ready? method
if (_firstFrameTime == 0)
{
_firstFrameTime = _frame_time;
}
@@ -1773,55 +1730,23 @@
pts = CMTimeMake(ptsTime*1000000, 1000000);
//NSLog(@"PTS TIME IS %@", CMTimeCopyDescription(kCFAllocatorDefault, pts));
duration = CMTimeMake(1000, self.videoCaptureSession.videoCaptureFPS*1000);
frameData.videoPTS = pts;
frameData.videoDuration = duration;
frameData.frameNumber = _frameCount;
frameData.frameTime = _frame_time;
CMTime videoTime = frameData.videoPTS;
NSUInteger audioConsumed = 0;
@synchronized(self)
{
NSUInteger audioBufferSize = [audioBuffer count];
for (int i = 0; i < audioBufferSize; i++)
{
CMSampleBufferRef audioData = (__bridge CMSampleBufferRef)[audioBuffer objectAtIndex:i];
CMTime audioTime = CMSampleBufferGetOutputPresentationTimeStamp(audioData);
if (CMTIME_COMPARE_INLINE(audioTime, <=, videoTime))
{
audioConsumed++;
[frameData.audioSamples addObject:(__bridge id)audioData];
} else {
break;
}
}
if (audioConsumed > 0)
{
[audioBuffer removeObjectsInRange:NSMakeRange(0, audioConsumed)];
}
}
for(id cKey in self.compressors)
{
CapturedFrameData *newFrameData = [[CapturedFrameData alloc] init];
newFrameData.videoPTS = pts;
newFrameData.videoDuration = duration;
newFrameData.frameNumber = _frameCount;
newFrameData.frameTime = _frame_time;
newFrameData.videoFrame = videoFrame;
id <h264Compressor> compressor;
compressor = self.compressors[cKey];
[compressor compressFrame:frameData];
[compressor compressFrame:newFrameData];
}

View file

@@ -20,6 +20,8 @@
@property (assign) double frameTime;
@property CVImageBufferRef videoFrame;
@property (assign) void *encoderData;
//Array of CMSampleBuffers from audio capture.
@property (retain) NSMutableArray *audioSamples;

View file

@@ -32,8 +32,11 @@
-(void)dealloc
{
if (_videoFrame)
{
CVPixelBufferRelease(_videoFrame);
}
@@ -49,6 +52,13 @@
}
for (id object in self.audioSamples)
{
CMSampleBufferRef audioSample = (__bridge CMSampleBufferRef)object;
CFRelease(audioSample);
}
self.audioSamples = nil;
}
@@ -81,12 +91,16 @@
-(void)setVideoFrame:(CVImageBufferRef)videoFrame
{
if (_videoFrame)
{
CVPixelBufferRelease(_videoFrame);
}
if (videoFrame)
{
CVPixelBufferRetain(videoFrame);
}
CVPixelBufferRetain(videoFrame);
_videoFrame = videoFrame;
}

View file

@@ -11,6 +11,9 @@
#import "h264Compressor.h"
@interface CompressorBase : NSObject <h264Compressor, NSCoding>
{
NSMutableArray *_audioBuffer;
}
@@ -36,6 +39,9 @@
-(void) reset;
-(BOOL) setupResolution:(CVImageBufferRef)withFrame;
-(void) addAudioData:(CMSampleBufferRef)audioData;
-(void) setAudioData:(CapturedFrameData *)forFrame syncObj:(id)syncObj;

View file

@@ -26,6 +26,8 @@
self.resolutionOption = @"Use Source";
self.outputs = [[NSMutableDictionary alloc] init];
_audioBuffer = [[NSMutableArray alloc] init];
}
return self;
@@ -62,6 +64,58 @@
-(void) addAudioData:(CMSampleBufferRef)audioData
{
if ([self hasOutputs] && audioData && _audioBuffer)
{
CFRetain(audioData);
@synchronized(self)
{
[_audioBuffer addObject:(__bridge id)audioData];
}
}
}
-(void) setAudioData:(CapturedFrameData *)forFrame syncObj:(id)syncObj
{
NSUInteger audioConsumed = 0;
//@synchronized(syncObj)
//{
NSUInteger audioBufferSize = [_audioBuffer count];
for (int i = 0; i < audioBufferSize; i++)
{
CMSampleBufferRef audioData = (__bridge CMSampleBufferRef)[_audioBuffer objectAtIndex:i];
CMTime audioTime = CMSampleBufferGetOutputPresentationTimeStamp(audioData);
if (CMTIME_COMPARE_INLINE(audioTime, <=, forFrame.videoPTS))
{
audioConsumed++;
[forFrame.audioSamples addObject:(__bridge id)audioData];
} else {
break;
}
}
if (audioConsumed > 0)
{
[_audioBuffer removeObjectsInRange:NSMakeRange(0, audioConsumed)];
}
//}
}
-(void) reset
{
return;
@@ -88,6 +142,7 @@
-(bool)setupCompressor:(CVPixelBufferRef)videoFrame
{
return YES;
}

View file

@@ -22,6 +22,7 @@
@property (readonly) double captureFPS;
@property (readonly) int audioBitrate;
@property (readonly) int audioSamplerate;
@property (assign) BOOL captureRunning;
@property int captureVideoMaxKeyframeInterval;
@property int captureVideoMaxBitrate;
@@ -39,9 +40,7 @@
@property (strong) NSMutableDictionary *compressors;
- (void) outputAVPacket:(AVPacket *)avpkt codec_ctx:(AVCodecContext *)codec_ctx;
- (void)captureOutputAudio:(id)fromDevice didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void) outputSampleBuffer:(CMSampleBufferRef)theBuffer;
-(void)captureOutputAudio:(id)fromDevice didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer;
-(void)newFrame;
-(void)setExtraData:(id)saveData forKey:(NSString *)forKey;
-(id)getExtraData:(NSString *)forkey;

View file

@@ -40,9 +40,9 @@
}
-(void) writeVideoSampleBuffer:(CMSampleBufferRef)theBuffer;
-(void) writeVideoSampleBuffer:(CapturedFrameData *)frameData;
-(void) writeAudioSampleBuffer:(CMSampleBufferRef)theBuffer presentationTimeStamp:(CMTime)pts;
-(void) writeAVPacket:(AVPacket *)pkt codec_ctx:(AVCodecContext *)codec_ctx;
-(void) writeAVPacket:(CapturedFrameData *)frameData;
-(void) writeEncodedData:(CapturedFrameData *)frameData;
-(void) updateOutputStats;
-(void) updateInputStats;

View file

@@ -137,9 +137,12 @@ void getAudioExtradata(char *cookie, char **buffer, size_t *size)
}
-(void) writeEncodedData:(CapturedFrameData *)frameData
-(void) writeEncodedData:(CapturedFrameData *)frameDataIn
{
CapturedFrameData *frameData = frameDataIn;
if (!self.active)
{
return;
@@ -162,6 +165,7 @@ void getAudioExtradata(char *cookie, char **buffer, size_t *size)
if (!_av_video_stream && _audio_extradata)
{
if ([self createAVFormatOut:frameData.encodedSampleBuffer codec_ctx:frameData.avcodec_ctx])
{
[self initStatsValues];
@@ -199,14 +203,14 @@ void getAudioExtradata(char *cookie, char **buffer, size_t *size)
if (frameData.encodedSampleBuffer)
{
[self writeVideoSampleBuffer:frameData.encodedSampleBuffer];
[self writeVideoSampleBuffer:frameData];
} else if (frameData.avcodec_pkt) {
[self writeAVPacket:frameData.avcodec_pkt codec_ctx:frameData.avcodec_ctx];
[self writeAVPacket:frameData];
}
}
-(void) writeAudioSampleBuffer:(CMSampleBufferRef)theBuffer presentationTimeStamp:(CMTime)pts;
-(void) writeAudioSampleBuffer:(CMSampleBufferRef)theBuffer presentationTimeStamp:(CMTime)pts
{
@@ -258,7 +262,7 @@ void getAudioExtradata(char *cookie, char **buffer, size_t *size)
// pkt.pts = pts.value;
if (av_interleaved_write_frame(_av_fmt_ctx, &pkt) < 0)
{
NSLog(@"AV WRITE AUDIO failed");
NSLog(@"AV WRITE AUDIO failed for %@", self.stream_output);
[self stopProcess];
}
//CMSampleBufferInvalidate(theBuffer);
@@ -463,10 +467,13 @@ void getAudioExtradata(char *cookie, char **buffer, size_t *size)
_output_frame_timestamp = time_now;
}
-(void) writeAVPacket:(AVPacket *)pkt codec_ctx:(AVCodecContext *)codec_ctx
//(AVPacket *)pkt codec_ctx:(AVCodecContext *)codec_ctx
-(void) writeAVPacket:(CapturedFrameData *)frameData
{
AVPacket *pkt = frameData.avcodec_pkt;
if (!_stream_dispatch)
{
_stream_dispatch = dispatch_queue_create("FFMpeg Stream Dispatch", NULL);
@@ -520,12 +527,12 @@ void getAudioExtradata(char *cookie, char **buffer, size_t *size)
if (p->pts != AV_NOPTS_VALUE)
{
p->pts = av_rescale_q(p->pts, codec_ctx->time_base, _av_video_stream->time_base);
p->pts = av_rescale_q(p->pts, frameData.avcodec_ctx->time_base, _av_video_stream->time_base);
}
if (p->dts != AV_NOPTS_VALUE)
{
p->dts = av_rescale_q(p->dts, codec_ctx->time_base, _av_video_stream->time_base);
p->dts = av_rescale_q(p->dts, frameData.avcodec_ctx->time_base, _av_video_stream->time_base);
}
@@ -537,7 +544,7 @@ void getAudioExtradata(char *cookie, char **buffer, size_t *size)
/* Write the compressed frame to the media file. */
if (av_interleaved_write_frame(_av_fmt_ctx, p) < 0)
{
NSLog(@"INTERLEAVED WRITE FRAME FAILED");
NSLog(@"INTERLEAVED WRITE FRAME FAILED FOR %@ frame number %lld", self.stream_output, frameData.frameNumber);
}
@@ -556,11 +563,11 @@ void getAudioExtradata(char *cookie, char **buffer, size_t *size)
}
-(void) writeVideoSampleBuffer:(CMSampleBufferRef)theBuffer
-(void) writeVideoSampleBuffer:(CapturedFrameData *)frameData
{
if (!theBuffer)
if (!frameData || !frameData.encodedSampleBuffer)
{
return;
}
@@ -593,10 +600,10 @@ void getAudioExtradata(char *cookie, char **buffer, size_t *size)
}
CFRetain(theBuffer);
CFRetain(frameData.encodedSampleBuffer);
CMBlockBufferRef tmp_sample_data = CMSampleBufferGetDataBuffer(theBuffer);
CMBlockBufferRef tmp_sample_data = CMSampleBufferGetDataBuffer(frameData.encodedSampleBuffer);
size_t data_length = CMBlockBufferGetDataLength(tmp_sample_data);
@@ -610,6 +617,7 @@ void getAudioExtradata(char *cookie, char **buffer, size_t *size)
dispatch_async(_stream_dispatch, ^{
CMSampleBufferRef theBuffer = frameData.encodedSampleBuffer;
if (!self.active)
{
return;
@@ -678,7 +686,7 @@ void getAudioExtradata(char *cookie, char **buffer, size_t *size)
if (av_interleaved_write_frame(_av_fmt_ctx, &pkt) < 0)
{
NSLog(@"VIDEO WRITE FRAME failed");
NSLog(@"VIDEO WRITE FRAME failed for %@", self.stream_output);
//[self stopProcess];
}
@@ -773,21 +781,23 @@ void getAudioExtradata(char *cookie, char **buffer, size_t *size)
}
avio_close(_av_fmt_ctx->pb);
av_free(_av_fmt_ctx);
avformat_free_context(_av_fmt_ctx);
}
/*
if (_av_video_stream)
av_free(_av_video_stream);
if (_av_audio_stream)
av_free(_av_audio_stream);
*/
_av_fmt_ctx = NULL;
_av_video_stream = NULL;
_av_audio_stream = NULL;
if (_audio_extradata)
{
free(_audio_extradata);
//free(_audio_extradata);
_audio_extradata = NULL;
}

View file

@@ -20,6 +20,8 @@
BOOL _active;
double _output_start_time;
NSMutableArray *_delayBuffer;
BOOL _stopped;
}

View file

@@ -192,6 +192,7 @@
_output_start_time = 0.0f;
_delayBuffer = [[NSMutableArray alloc] init];
self.delay_buffer_frames = 0;
_stopped = YES;
}
return self;
@@ -249,6 +250,8 @@
return;
}
NSObject <h264Compressor> *old_compressor = self.compressor;
if (self.compressor_name)
{
self.compressor = self.settingsController.compressors[self.compressor_name];
@@ -270,6 +273,12 @@
[self.compressor addOutput:self];
[self.compressor addObserver:self forKeyPath:@"errored" options:NSKeyValueObservingOptionNew context:NULL];
}
if (old_compressor && (self.compressor != old_compressor))
{
[old_compressor removeOutput:self];
[old_compressor removeObserver:self forKeyPath:@"errored"];
}
}
- (void) observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
@@ -337,17 +346,18 @@
{
if (!self.ffmpeg_out)
if (self.settingsController.captureRunning && !self.ffmpeg_out)
{
[self attachOutput];
}
sendData = [_delayBuffer objectAtIndex:0];
[_delayBuffer removeObjectAtIndex:0];
}
if (sendData)
if (sendData && self.ffmpeg_out)
{
[self.ffmpeg_out writeEncodedData:sendData];
}

View file

@@ -47,6 +47,7 @@
-(bool) hasOutputs;
-(void) reset;
-(bool) validate:(NSError **)therror;
-(void) addAudioData:(CMSampleBufferRef)audioData;

View file

@@ -39,6 +39,10 @@
copy.crf = self.crf;
copy.use_cbr = self.use_cbr;
copy.width = self.width;
copy.height = self.height;
copy.resolutionOption = self.resolutionOption;
return copy;
}
@@ -56,6 +60,10 @@
[aCoder encodeInteger:self.keyframe_interval forKey:@"keyframe_interval"];
[aCoder encodeInteger:self.crf forKey:@"crf"];
[aCoder encodeBool:self.use_cbr forKey:@"use_cbr"];
[aCoder encodeInteger:self.width forKey:@"videoWidth"];
[aCoder encodeInteger:self.height forKey:@"videoHeight"];
[aCoder encodeObject:self.resolutionOption forKey:@"resolutionOption"];
}
-(id) initWithCoder:(NSCoder *)aDecoder
@@ -71,6 +79,14 @@
self.crf = (int)[aDecoder decodeIntegerForKey:@"crf"];
self.use_cbr = [aDecoder decodeBoolForKey:@"use_cbr"];
self.keyframe_interval = (int)[aDecoder decodeIntegerForKey:@"keyframe_interval"];
self.width = (int)[aDecoder decodeIntegerForKey:@"videoWidth"];
self.height = (int)[aDecoder decodeIntegerForKey:@"videoHeight"];
if ([aDecoder decodeObjectForKey:@"resolutionOption"])
{
self.resolutionOption = [aDecoder decodeObjectForKey:@"resolutionOption"];
}
}
@@ -119,6 +135,9 @@
-(void) reset
{
_compressor_queue = nil;
self.errored = NO;
_av_codec = NULL;
}
@@ -163,6 +182,11 @@
}
if (frameData.videoFrame)
{
CVPixelBufferRetain(frameData.videoFrame);
}
dispatch_async(_compressor_queue, ^{
@@ -183,26 +207,14 @@
CMTime pts = frameData.videoPTS;
size_t src_height;
size_t src_width;
enum PixelFormat frame_fmt;
CVImageBufferRef imageBuffer = frameData.videoFrame;
OSType cv_pixel_format = CVPixelBufferGetPixelFormatType(imageBuffer);
//NSLog(@"WIDTH INPUT %zd HEIGHT %zd", CVPixelBufferGetWidth(imageBuffer), CVPixelBufferGetHeight(imageBuffer));
if (cv_pixel_format == kCVPixelFormatType_422YpCbCr8)
{
frame_fmt = PIX_FMT_UYVY422;
} else if (cv_pixel_format == kCVPixelFormatType_422YpCbCr8FullRange) {
frame_fmt = PIX_FMT_YUYV422;
} else if (cv_pixel_format == kCVPixelFormatType_32BGRA) {
frame_fmt = PIX_FMT_BGRA;
} else {
frame_fmt = PIX_FMT_NV12;
}
src_height = CVPixelBufferGetHeight(imageBuffer);
src_width = CVPixelBufferGetWidth(imageBuffer);
@@ -219,10 +231,12 @@
VTPixelTransferSessionTransferImage(_vtpt_ref, imageBuffer, converted_frame);
CVPixelBufferRelease(imageBuffer);
imageBuffer = nil;
//poke the frameData so it releases the video buffer
frameData.videoFrame = nil;
//CVPixelBufferRelease(imageBuffer);
AVFrame *outframe = avcodec_alloc_frame();
outframe->format = PIX_FMT_YUV420P;
outframe->width = (int)src_width;
@@ -283,9 +297,12 @@
frameData.avcodec_ctx = _av_codec_ctx;
frameData.avcodec_pkt = pkt;
[self setAudioData:frameData syncObj:self];
for (id dKey in self.outputs)
{
OutputDestination *dest = self.outputs[dKey];
[dest writeEncodedData:frameData];
}
@@ -384,22 +401,25 @@
id x264preset = self.preset;
if (x264preset != [NSNull null])
if (x264preset != nil)
{
NSLog(@"SETTING PRESET %@", x264preset);
av_dict_set(&opts, "preset", [x264preset UTF8String], 0);
}
id x264profile = self.profile;
if (x264profile != [NSNull null])
if (x264profile != nil)
{
NSLog(@"SETTING PROFILE %@", x264profile);
av_dict_set(&opts, "profile", [x264profile UTF8String], 0);
}
id x264tune = self.tune;
if (x264tune != [NSNull null])
if (x264tune != nil)
{
NSLog(@"SETTING TUNE %@", x264tune);
av_dict_set(&opts, "tune", [x264tune UTF8String], 0);
}
@@ -412,6 +432,8 @@
_sws_ctx = NULL;
_audioBuffer = [[NSMutableArray alloc] init];
return YES;
}