Removed a bunch of NSLogs

Audio engine mixer nodes now dynamically allocate input buses as needed, and reuse ones from disconnected nodes.
This commit is contained in:
Zakk 2017-12-09 09:02:32 -05:00
parent 426df6465d
commit af6c5b1606
39 changed files with 55 additions and 109 deletions

View file

@ -531,7 +531,6 @@
-(void)dealloc
{
NSLog(@"MOVIE DEALLOC");
if (self.pcmPlayer)
{
[self deregisterPCMOutput];

View file

@ -662,7 +662,7 @@
av_frame_unref(conv_frame);
av_frame_free(&conv_frame);
}
if (!_first_frame)
if (!_first_frame && conv_frame)
{
_first_frame = av_frame_alloc();
av_frame_ref(_first_frame, conv_frame);

View file

@ -293,7 +293,6 @@
[self insertObject:item inInputQueueAtIndex:self.inputQueue.count];
if (self.inputQueue.count == 1)
{
NSLog(@"OPEN MEDIA");
[item openMedia:20];
}
@ -472,7 +471,6 @@
useInput = self.inputQueue.firstObject;
}
NSLog(@"USE INPUT %@", useInput);
if (useInput)
{

View file

@ -197,7 +197,6 @@
{
if (connection.output == _video_capture_output)
{
NSLog(@"DROPPED FRAME!!!");
}
}

View file

@ -403,7 +403,6 @@
-(void)stopCapture
{
NSLog(@"STOPPING CAPTURE!");
if (_deviceInput)
{
_deviceInput->StopStreams();

View file

@ -208,8 +208,6 @@
if (self.fillColor)
{
newLayer.fillColor = [self.fillColor CGColor];
NSLog(@"REF COUNT OF BG COLOR %ld", CFGetRetainCount(newLayer.fillColor));
}
if (self.lineColor)

View file

@ -223,7 +223,6 @@
{
NSLog(@"INJECTING %@", toInject);
self.injectSB = [SBApplication applicationWithProcessIdentifier:toInject.processIdentifier];
[self.injectSB setTimeout:10*60];

View file

@ -243,7 +243,6 @@
[_capture_session addOutput:_audio_capture_output];
} else {
NSLog(@"COULDN'T ADD AUDIO OUTPUT");
}
[_capture_session commitConfiguration];

View file

@ -9,7 +9,6 @@ animation_params = ["degrees"]
def do_animation():
NSLog("RUNNING ANIMATION")
source1 = inputByName('source1')
source2 = inputsByName('source2')

View file

@ -26,7 +26,6 @@
-(void)setDelay:(float)delay
{
_delay = delay;
NSLog(@"DELAY SET %f", delay);
AudioUnitSetParameter(self.audioUnit, kDelayParam_DelayTime, kAudioUnitScope_Global, 0, self.delay, 0);

View file

@ -63,7 +63,6 @@
OSStatus err = AudioUnitGetProperty(self.audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, outfmt, &outsize);
NSLog(@"MY OUTPUT FORMAT %d %f", err, outfmt->mSampleRate);
return outfmt;
}

View file

@ -320,7 +320,6 @@ OSStatus encoderRenderCallback( void *inRefCon, AudioUnitRenderActionFlags *ioAc
if ([self.graphOutputNode respondsToSelector:@selector(setOutputForDevice)])
{
NSLog(@"SET OUTPUT NODE");
[self.graphOutputNode setOutputForDevice];
}
@ -578,6 +577,7 @@ OSStatus encoderRenderCallback( void *inRefCon, AudioUnitRenderActionFlags *ioAc
-(void)addFileInput:(CAMultiAudioFile *)fileInput
{
[self attachFileInput:fileInput];
[self.fileInputs addObject:fileInput];
}
@ -659,7 +659,6 @@ OSStatus encoderRenderCallback( void *inRefCon, AudioUnitRenderActionFlags *ioAc
if (!ret)
{
NSLog(@"ADD NODE %@ FAILED", graphInput);
return NO;
}
@ -670,7 +669,6 @@ OSStatus encoderRenderCallback( void *inRefCon, AudioUnitRenderActionFlags *ioAc
ret = [self.graph addNode:dmix];
if (!ret)
{
NSLog(@"ADD MIXER %@ failed", dmix);
[self disconnectInputNode:input];
return NO;
}
@ -679,7 +677,6 @@ OSStatus encoderRenderCallback( void *inRefCon, AudioUnitRenderActionFlags *ioAc
ret = [self.graph addNode:eq];
if (!ret)
{
NSLog(@"ADD EQ %@ failed", eq);
[self disconnectInputNode:input];
return NO;
}
@ -688,7 +685,6 @@ OSStatus encoderRenderCallback( void *inRefCon, AudioUnitRenderActionFlags *ioAc
if (![self.graph connectNode:eq toNode:self.encodeMixer])
{
NSLog(@"CONNECT EQ TO ENCODE FAILED");
[self disconnectInputNode:input];
return NO;
}
@ -708,14 +704,12 @@ OSStatus encoderRenderCallback( void *inRefCon, AudioUnitRenderActionFlags *ioAc
ret = [self.graph addNode:delayNode];
if (!ret)
{
NSLog(@"ADD DELAY %@ (%d) failed %d", delayNode, i, input.channelCount);
[self disconnectInputNode:input];
return NO;
}
ret = [self.graph connectNode:delayNode toNode:connectNode];
if (!ret)
{
NSLog(@"CONNECT DELAY %@ (%d) failed", delayNode, i);
[self.graph removeNode:delayNode];
[self disconnectInputNode:input];
@ -728,7 +722,6 @@ OSStatus encoderRenderCallback( void *inRefCon, AudioUnitRenderActionFlags *ioAc
if (![self.graph connectNode:dmix toNode:connectNode])
{
NSLog(@"CONNECT EQ/DMIX FAILED");
[self disconnectInputNode:input];
return NO;
}
@ -737,7 +730,6 @@ OSStatus encoderRenderCallback( void *inRefCon, AudioUnitRenderActionFlags *ioAc
if (input.converterNode)
{
[self.graph addNode:input.converterNode];
NSLog(@"ADDED CONVERTER");
if (![self.graph connectNode:input.converterNode toNode:dmix])
{
[self disconnectInputNode:input];
@ -754,7 +746,6 @@ OSStatus encoderRenderCallback( void *inRefCon, AudioUnitRenderActionFlags *ioAc
if (![self.graph connectNode:graphInput toNode:dmix])
{
NSLog(@"LAST CONNECT FAILED %@", graphInput);
[self disconnectInputNode:input];
return NO;
}
@ -765,6 +756,7 @@ OSStatus encoderRenderCallback( void *inRefCon, AudioUnitRenderActionFlags *ioAc
dmix.volume = graphInput.volume;
}
return YES;
}
@ -812,7 +804,6 @@ OSStatus encoderRenderCallback( void *inRefCon, AudioUnitRenderActionFlags *ioAc
-(bool)attachInput:(CAMultiAudioInput *)input
{
bool ret = [self attachInputCommon:input];
if (!ret)
{

View file

@ -192,11 +192,13 @@
}
OSStatus err;
/*
if (![self stopGraph])
{
NSLog(@"Graph %@: graphUpdate, stopGraph failed", self);
return NO;
}
*/
err = AUGraphUpdate(_graphInst, NULL);
if (err)
@ -205,12 +207,13 @@
return NO;
}
/*
if (![self startGraph])
{
NSLog(@"Graph %@: graphUpdate, startGraph failed", self);
return NO;
}
}*/
return YES;
}
@ -266,12 +269,11 @@
if (![self graphUpdate])
{
NSLog(@"Graph %@ graphUpdate for connection failed %@ -> %@", self, node, toNode);
return NO;
}
return YES;
}

View file

@ -23,7 +23,7 @@
-(void)willInitializeNode
{
UInt32 elementCount = 32;
UInt32 elementCount = 64;
OSStatus err = AudioUnitSetProperty(self.audioUnit, kAudioUnitProperty_ElementCount, kAudioUnitScope_Input, 0,&elementCount, sizeof(UInt32));
@ -47,9 +47,7 @@
[self setOutputVolume];
for (UInt32 i = 0; i < 15; i++) {
[self setVolumeOnInputBus:i volume:1.0];
}
}
@ -117,27 +115,51 @@
-(UInt32)getNextElement
{
UInt32 elementCount = 0;
UInt32 elementSize = 0;
UInt32 elementSize = sizeof(UInt32);
//AudioUnitUninitialize(self.audioUnit);
AudioUnitSetProperty(self.audioUnit, kAudioUnitProperty_ElementCount, kAudioUnitScope_Input, 0, &elementCount, elementSize);
// AudioUnitInitialize(self.audioUnit);
UInt32 useElement = 0;
AudioUnitGetProperty(self.audioUnit, kAudioUnitProperty_ElementCount, kAudioUnitScope_Input, 0, &elementCount, &elementSize);
/*
UInt32 interactionCnt = elementCount*2;
UInt32 interactionCnt = 0;
AUGraphCountNodeInteractions(self.graph.graphInst, self.node, &interactionCnt);
AUNodeInteraction *interactions = malloc(sizeof(AUNodeInteraction)*interactionCnt);
AUGraphGetNodeInteractions(self.graph.graphInst, self.node, &interactionCnt, interactions);
*/
//Naive implementation. bump up element count by one and return that as the bus to connect to.
useElement = 0;
UInt32 seenIdx = 0;
//elementCount = 3;
for (int i=0; i < interactionCnt; i++)
{
//NSLog(@"RETURNING ELEMENT %d", elementCount-1);
return _nextElement++;
AUNodeInteraction iact = interactions[i];
if (iact.nodeInteractionType == kAUNodeInteraction_Connection && iact.nodeInteraction.connection.destNode == self.node)
{
if (seenIdx != iact.nodeInteraction.connection.destInputNumber)
{
useElement = seenIdx;
break;
} else {
seenIdx++;
useElement = iact.nodeInteraction.connection.destInputNumber+1;
}
}
}
free(interactions);
if (useElement >= elementCount)
{
elementCount += 64;
AudioUnitSetProperty(self.audioUnit, kAudioUnitProperty_ElementCount, kAudioUnitScope_Input, 0, &elementCount, sizeof(elementCount));
}
[self setVolumeOnInputBus:useElement volume:1.0];
return useElement;
}

View file

@ -382,7 +382,6 @@ void BufferCompletedPlaying(void *userData, ScheduledAudioSlice *bufferList);
-(void)dealloc
{
NSLog(@"DEALLOC PCM PLAYER");
[self flush];
_pendingBuffers = nil;
if (_inputFormat)

View file

@ -64,7 +64,6 @@
CAShow(_audioUnit);
OSStatus err = AudioOutputUnitStart(_audioUnit);
NSLog(@"AU OUTPUT START %d", err);
}
@end

View file

@ -10,7 +10,7 @@ var runAnimationForLayoutWithExtraDictionary = function(animation_string, layout
try {
setCompletionBlock(function() { console.log("COMPLETION BLOCK");});
//setCompletionBlock(function() { console.log("COMPLETION BLOCK");});
eval(animation_string);
}
catch(err) {

View file

@ -53,7 +53,6 @@
{
if (self = [self init])
{
NSLog(@"INIT WITH NODE: NODE UUID %@", node.nodeUID);
self.audioUUID = node.nodeUID;
self.audioVolume = node.volume;
@ -94,7 +93,6 @@
{
NSDictionary *userData = notification.userInfo;
NSString *nodeUUID = userData[@"UUID"];
NSLog(@"NOTIFICAION FOR %@", nodeUUID);
if (nodeUUID && [nodeUUID isEqualToString:self.audioUUID])
{
[self applyAudioSettings];

View file

@ -521,7 +521,6 @@
}
-(void)dealloc
{
NSLog(@"CAPTURE BASE DEALLOC");
if (self.timerDelegate)
{
[self.timerDelegate frameTimerWillStop:self.timerDelegateCtx];

View file

@ -232,7 +232,6 @@
if (infologLength > 0)
{
NSLog(@"LOG FOR SHADER %@: %s\n",shaderPath, infoLog);
}
}
@ -660,7 +659,6 @@
-(void)releaseCGLContext:(CGLContextObj)ctx
{
NSLog(@"RELEASE CGL CONTEXT!");
CGLDestroyContext(ctx);
}

View file

@ -301,7 +301,6 @@
{
if (self = [super init])
{
NSLog(@"INPUT LAYER INIT %@", self);
self.minificationFilter = kCAFilterTrilinear;
self.magnificationFilter = kCAFilterTrilinear;

View file

@ -94,7 +94,6 @@
-(void)stopRecordingForOutput:(OutputDestination *)output
{
NSLog(@"STOP RECORDING");
OutputDestination *useOut;
for (OutputDestination *tmpOut in self.outputs)
@ -110,7 +109,6 @@
useOut.captureRunning = NO;
[useOut stopOutput];
NSLog(@"RESETTING OUTPUT");
[useOut reset];
[self.outputs removeObject:useOut];
@ -170,7 +168,7 @@
{
NSLog(@"RECORDING ACTIVE? %d", self.recordingActive);
if (!self.recordingActive)
{
@ -194,7 +192,6 @@
if (!self.audioEngine)
{
NSLog(@"CREATING AUDIO ENGINE");
self.audioEngine = [[CAMultiAudioEngine alloc] init];
self.audioEngine.sampleRate = [CaptureController sharedCaptureController].audioSamplerate;
@ -540,7 +537,6 @@
if (CMTIME_COMPARE_INLINE(_firstAudioTime, ==, kCMTimeZero))
{
NSLog(@"FIRST AUDIO AT %f", CFAbsoluteTimeGetCurrent());
_firstAudioTime = orig_pts;
return;

View file

@ -289,10 +289,6 @@
return NO;
}
-(void)mouseEntered:(NSEvent *)theEvent
{
NSLog(@"MOUSE ENTERED!!!");
}
@end

View file

@ -73,7 +73,6 @@
if (configViewController)
{
NSLog(@"CONFIG VIEW C %@", configViewController);
//Be gross like input view controllers!
configViewController.sequenceItem = self;
}

View file

@ -14,10 +14,8 @@
-(void)animationDidStart:(CAAnimation *)anim
{
NSLog(@"ANIMATION DID START");
[CATransaction begin];
[CATransaction setCompletionBlock:^{
NSLog(@"TRANSACTION COMPLETION BLOCK");
for (InputSource *nSrc in self.addedInputs)
{
if (nSrc.layer)
@ -87,9 +85,11 @@
}
/*
-(void)animationDidStop:(CAAnimation *)anim finished:(BOOL)flag
{
NSLog(@"DELEGATE ANIMATION STOPPED");
}
}*/
@end

View file

@ -1431,7 +1431,6 @@
//Reassert recording flag for save
if (forLayout)
{
NSLog(@"REASSERT RECORDING");
forLayout.recordingLayout = YES;
}
@ -1581,7 +1580,6 @@
PyObject *ret = PyRun_File(runnerFile, (char *)[[fromFile lastPathComponent] UTF8String], Py_file_input, dict_copy, dict_copy);
if (!ret)
{
NSLog(@"PYTHON RETURNED NULL!");
PyErr_Print();
return nil;
}
@ -2087,7 +2085,6 @@
self.compressors = [[saveRoot valueForKey:@"compressors"] mutableCopy];
NSLog(@"COMPRESSORS %@", self.compressors);
if (!self.compressors)
{
@ -2307,7 +2304,6 @@
{
if (layout.recordingLayout)
{
NSLog(@"RECORD LAYOUT");
[self startRecordingLayout:layout];
}
}
@ -2740,7 +2736,6 @@
outdest.settingsController = self.mainLayoutRecorder;
if (outdest.active)
{
NSLog(@"RESET AND SETUP %@", outdest);
[outdest reset];
[outdest setup];

View file

@ -229,7 +229,6 @@ void __ProResPixelBufferRelease( void *releaseRefCon, const void *baseAddress )
if (status != noErr || !_compression_session)
{
NSLog(@"COMPRESSOR SETUP ERROR");
self.errored = YES;
return NO;
}

View file

@ -234,10 +234,9 @@ void PixelBufferRelease( void *releaseRefCon, const void *baseAddress )
{
CFArrayRef encoders = NULL;
// CFArrayRef encoders = NULL;
VTCopyVideoEncoderList(NULL, &encoders);
NSLog(@"ENCODERS %@", encoders);
// VTCopyVideoEncoderList(NULL, &encoders);
NSMutableDictionary *encoderSpec = [[NSMutableDictionary alloc] init];
encoderSpec[(__bridge NSString *)kVTVideoEncoderSpecification_RequireHardwareAcceleratedVideoEncoder] = @YES;
@ -301,7 +300,6 @@ void PixelBufferRelease( void *releaseRefCon, const void *baseAddress )
if (status != noErr || !_compression_session)
{
NSLog(@"COMPRESSOR SETUP ERROR");
self.errored = YES;
return NO;
}

View file

@ -169,7 +169,6 @@
{
@synchronized (self) {
_reset_flag = YES;
NSLog(@"DISPATCH SIGNAL FOR RESET");
dispatch_semaphore_signal(_queueSemaphore);
}
}
@ -390,7 +389,6 @@
[dest writeEncodedData:frameData];
}
NSLog(@"DID NOT ENCODE");
return NO;
}
@ -673,7 +671,6 @@
_av_codec = NULL;
av_dict_free(&opts);
NSLog(@"CODEC SETUP FAILED!");
return NO;
}

View file

@ -17,10 +17,6 @@
#include "libavformat/avformat.h"
-(void)dealloc
{
NSLog(@"DEALLOC FFMPEG");
}
-(NSUInteger)frameQueueSize
{
@ -359,7 +355,6 @@ void getAudioExtradata(char *cookie, char **buffer, size_t *size)
if (!_av_fmt_ctx)
{
NSLog(@"NO FMT CTX");
return NO;
}
@ -372,7 +367,6 @@ void getAudioExtradata(char *cookie, char **buffer, size_t *size)
if (!_av_video_stream)
{
NSLog(@"VIDEO STREAM SETUP FAIL");
return NO;
}

View file

@ -682,7 +682,6 @@ static NSArray *_sourceTypes = nil;
-(void)commonInit
{
NSLog(@"COMMON INIT %@", self);
[CATransaction begin];
self.name = nil;
_nextImageTime = 0.0f;
@ -1476,7 +1475,6 @@ static NSArray *_sourceTypes = nil;
-(void)dealloc
{
NSLog(@"INPUT SOURCE DEALLOC");
[self deregisterVideoInput:self.videoInput];
for(id vInput in self.videoSources)
{
@ -2150,7 +2148,6 @@ static NSArray *_sourceTypes = nil;
if (!self.videoInput || NSEqualSizes(NSZeroSize, self.videoInput.captureSize))
{
NSLog(@"NO SIZE FOR RESET");
return;
}
@ -2167,7 +2164,6 @@ static NSArray *_sourceTypes = nil;
resize_style resizeSave = self.resizeType;
self.resizeType = kResizeTop | kResizeRight | kResizeFree;
NSLog(@"NEW SIZE %f %f", width, height);
[self updateSize:width height:height];
self.resizeType = resizeSave;
}
@ -3549,7 +3545,6 @@ static NSArray *_sourceTypes = nil;
{
if (self.videoInput)
{
NSLog(@"RETURNING VIDEO INPUT DURATION %@ %f", self.videoInput, self.videoInput.duration);
return self.videoInput.duration;
}

View file

@ -208,7 +208,6 @@
//self.editLayout.canvas_width = parent_width;
//self.editLayout.canvas_height = parent_height;
NSLog(@"PARENT WIDTH %f HEIGHT %f", parent_width, parent_height);
//[self.editLayout addSource:iSrc];

View file

@ -19,7 +19,6 @@
-(void)setRepresentedObject:(id)representedObject
{
[super setRepresentedObject:representedObject];
NSLog(@"SET REPRESENTED OBJECT %@", representedObject);
[self.representedObject addObserver:self forKeyPath:@"in_live" options:NSKeyValueObservingOptionNew context:NULL];
[self.representedObject addObserver:self forKeyPath:@"in_staging" options:NSKeyValueObservingOptionNew context:NULL];
@ -32,8 +31,6 @@
AppDelegate *appDel = [NSApp delegate];
self.captureController = appDel.captureController;
NSLog(@"LAYOUT BUTTON %@", self.layoutButton);
NSLog(@"REPRESENT AWAKE %@", self.representedObject);

View file

@ -68,7 +68,6 @@
-(void)saveCompressPanel
{
NSLog(@"SAVING COMPRESS PANEL %@ %@", self.compressorObjectController, self.compressorViewController);
[self.compressorObjectController commitEditing];
[self.compressorViewController commitEditing];
[self.window.sheetParent endSheet:self.window returnCode:NSModalResponseOK];

View file

@ -53,7 +53,6 @@
BOOL tmp;
[invocation getArgument:&tmp atIndex:idx];
[argArray addObject:@(tmp)];
NSLog(@"BOOL ARG IS %hhd", tmp);
} else if (ISARGUMENTTYPE(char, aType)) {
char tmp;
@ -112,11 +111,8 @@
id tmp;
[invocation getArgument:&tmp atIndex:idx];
[argArray addObject:tmp];
NSLog(@"TMP IS %@", tmp);
} else {
NSLog(@"UNSUPPORTED TYPE");
[argArray addObject:@(0)];
}
}
@ -177,7 +173,6 @@
NSString *jsFunction = [self mangleName:selName];
NSLog(@"CALL JSFUNCTION %@", jsFunction);
if (!self.jsObject[jsFunction].isUndefined)
{
NSArray *argArray = [self argumentListForInvocation:anInvocation];
@ -207,7 +202,6 @@
if (!self.jsObject[jsFunction].isUndefined)
{
NSLog(@"DID IMPLEMENT");
return YES;
}

View file

@ -115,8 +115,6 @@
glLoadIdentity();
glOrtho(0, _cvpool_size.width, 0,_cvpool_size.height, -1, 1);
NSLog(@"CVPOOL %@", NSStringFromSize(_cvpool_size));
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
@ -312,7 +310,6 @@
-(bool) createPixelBufferPoolForSize:(NSSize) size
{
NSLog(@"Controller: Creating Pixel Buffer Pool %f x %f LAYOUT %@", size.width, size.height, self.layout);
NSMutableDictionary *attributes = [NSMutableDictionary dictionary];
[attributes setValue:[NSNumber numberWithInt:size.width] forKey:(NSString *)kCVPixelBufferWidthKey];

View file

@ -1400,7 +1400,6 @@
{
InputSource *newSource = toClone.copy;
[self.sourceLayout addSource:newSource];
NSLog(@"NEW SOURCE UUID %@ PARENT %@", newSource.uuid, toClone.uuid);
[[self.undoManager prepareWithInvocationTarget:self] undoCloneInput:newSource.uuid parentUUID:toClone.uuid];
}

View file

@ -80,8 +80,6 @@ JS_EXPORT void JSSynchronousGarbageCollectForDebugging(JSContextRef ctx);
JSValue *scriptFunc = jsCtx[@"runTriggerScriptInput"];
if (scriptFunc)
{
NSLog(@"CALLING FOR %@ LAYER %@", input, input.layer);
[scriptFunc callWithArguments:@[input, scriptName]];
}
}

View file

@ -213,7 +213,6 @@ void qt_xpc_list_devices(xpc_connection_t conn, xpc_object_t event)
{
NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
NSLog(@"DEVICES IN HELPER %@", devices);
QTCaptureDevice *devinstance;