OverSight/Application/Application/AVMonitor.m

1981 lines
62 KiB
Objective-C

//
// AVMonitor.m
// Application
//
// Created by Patrick Wardle on 4/30/21.
// Copyright © 2021 Objective-See. All rights reserved.
//
@import OSLog;
@import AVFoundation;
#import "consts.h"
#import "Client.h"
#import "AVMonitor.h"
#import "utilities.h"
#import "AppDelegate.h"
/* GLOBALS */
//log handle
extern os_log_t logHandle;
@implementation AVMonitor
//init
// creates the log monitor, attribution/listener collections, the shared event
// queue, the user-notification actions/categories, and locates the built-in
// mic and camera
-(id)init
{
//action: ok
UNNotificationAction *ok = nil;
//action: allow (once)
UNNotificationAction *allow = nil;
//action: allow (always)
UNNotificationAction *allowAlways = nil;
//action: block
UNNotificationAction *block = nil;
//close category
UNNotificationCategory* closeCategory = nil;
//action category
UNNotificationCategory* actionCategory = nil;
//super
self = [super init];
if(nil != self)
{
//init log monitor
self.logMonitor = [[LogMonitor alloc] init];
//init audio attributions
// pids of processes currently using the mic
self.audioAttributions = [NSMutableArray array];
//init camera attributions
// pids of processes currently using the camera
self.cameraAttributions = [NSMutableArray array];
//init audio listeners
// keyed by device unique ID (so listener blocks can be found/removed later)
self.audioListeners = [NSMutableDictionary dictionary];
//init video listeners
// keyed by device unique ID (so listener blocks can be found/removed later)
self.cameraListeners = [NSMutableDictionary dictionary];
//init event queue
// concurrent; also handed to the CoreAudio/CMIO listener registrations
self.eventQueue = dispatch_queue_create([[NSString stringWithFormat:@"%s.eventQueue", BUNDLE_ID] UTF8String], DISPATCH_QUEUE_CONCURRENT);
//set up delegate
UNUserNotificationCenter.currentNotificationCenter.delegate = self;
//init ok action
ok = [UNNotificationAction actionWithIdentifier:@"Ok" title:@"Ok" options:UNNotificationActionOptionNone];
//init close category
// just an 'Ok' button
closeCategory = [UNNotificationCategory categoryWithIdentifier:CATEGORY_CLOSE actions:@[ok] intentIdentifiers:@[] options:0];
//init allow (once) action
allow = [UNNotificationAction actionWithIdentifier:@"Allow" title:@"Allow (Once)" options:UNNotificationActionOptionNone];
//init allow (always) action
allowAlways = [UNNotificationAction actionWithIdentifier:@"AllowAlways" title:@"Allow (Always)" options:UNNotificationActionOptionNone];
//init block action
block = [UNNotificationAction actionWithIdentifier:@"Block" title:@"Block" options:UNNotificationActionOptionNone];
//init action category
// allow / allow always / block; also captures custom dismiss actions
actionCategory = [UNNotificationCategory categoryWithIdentifier:CATEGORY_ACTION actions:@[allow, allowAlways, block] intentIdentifiers:@[] options:UNNotificationCategoryOptionCustomDismissAction];
//set categories
[UNUserNotificationCenter.currentNotificationCenter setNotificationCategories:[NSSet setWithObjects:closeCategory, actionCategory, nil]];
//per device events
// last event per device ID; used to detect duplicate/spurious deliveries
self.deviceEvents = [NSMutableDictionary dictionary];
//find built-in mic
self.builtInMic = [self findBuiltInMic];
//dbg msg
os_log_debug(logHandle, "built-in mic: %{public}@ (device ID: %d)", self.builtInMic.localizedName, [self getAVObjectID:self.builtInMic]);
//find built-in camera
self.builtInCamera = [self findBuiltInCamera];
//dbg msg
os_log_debug(logHandle, "built-in camera: %{public}@ (device ID: %d)", self.builtInCamera.localizedName, [self getAVObjectID:self.builtInCamera]);
}
return self;
}
//monitor AV
// starts the log monitor, registers per-device listeners for all current
// audio/video input devices, and observes device (dis)connect notifications
// also generates alerts as needed
-(void)start
{
//dbg msg
os_log_debug(logHandle, "starting AV monitoring");
//start log monitor
// source of 'responsible process' (pid) info
[self startLogMonitor];
//watch all input audio (mic) devices
for(AVCaptureDevice* audioDevice in [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio])
{
//start (device) monitor
[self watchAudioDevice:audioDevice];
}
//watch all input video (cam) devices
for(AVCaptureDevice* videoDevice in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
{
//start (device) monitor
[self watchVideoDevice:videoDevice];
}
//dbg msg
os_log_debug(logHandle, "registering for device connection/disconnection notifications");
//handle new device connections
[NSNotificationCenter.defaultCenter addObserver:self selector:@selector(handleConnectedDeviceNotification:) name:AVCaptureDeviceWasConnectedNotification object:nil];
//handle device disconnections
[NSNotificationCenter.defaultCenter addObserver:self selector:@selector(handleDisconnectedDeviceNotification:) name:AVCaptureDeviceWasDisconnectedNotification object:nil];
return;
}
//new device is connected
// determine its media type (audio vs. video), then start watching it for events
-(void)handleConnectedDeviceNotification:(NSNotification *)notification
{
    //connected device (delivered as the notification's object)
    AVCaptureDevice* connectedDevice = notification.object;
    
    //dbg msg
    os_log_debug(logHandle, "new device connected: %{public}@", connectedDevice.localizedName);
    
    //audio device?
    // start mic monitoring
    if([connectedDevice hasMediaType:AVMediaTypeAudio])
    {
        [self watchAudioDevice:connectedDevice];
    }
    //video device?
    // start camera monitoring
    else if([connectedDevice hasMediaType:AVMediaTypeVideo])
    {
        [self watchVideoDevice:connectedDevice];
    }
    
    return;
}
//device is disconnected
// determine its media type (audio vs. video), then stop watching it
-(void)handleDisconnectedDeviceNotification:(NSNotification *)notification
{
    //disconnected device (delivered as the notification's object)
    AVCaptureDevice* disconnectedDevice = notification.object;
    
    //dbg msg
    os_log_debug(logHandle, "device disconnected: %{public}@", disconnectedDevice.localizedName);
    
    //audio device?
    // stop mic monitoring
    if([disconnectedDevice hasMediaType:AVMediaTypeAudio])
    {
        [self unwatchAudioDevice:disconnectedDevice];
    }
    //video device?
    // stop camera monitoring
    else if([disconnectedDevice hasMediaType:AVMediaTypeVideo])
    {
        [self unwatchVideoDevice:disconnectedDevice];
    }
    
    return;
}
//log monitor
// streams OSLog events to recover which process (pid) is responsible for
// mic/camera activity; the predicate and parsing differ per macOS version:
//  macOS 14+:   cmio (camera) + coremedia (mic) msgs -> lastCameraClient/lastMicClient
//  macOS 13.3+: cmio 'CMIOExtensionPropertyDeviceControlPID' -> lastCameraClient
//  older:       SystemStatus attribution lists -> processAttributions:
-(void)startLogMonitor
{
//dbg msg
os_log_debug(logHandle, "starting log monitor for AV events");
//macOS 14+
if(@available(macOS 14.0, *)) {
//regex for mic log msg
NSRegularExpression* micRegex = nil;
//regex for camera log msg
NSRegularExpression* cameraRegex = nil;
//dbg msg
os_log_debug(logHandle, ">= macOS 14+: Using log monitor for AV events via w/ (camera): 'added <private> endpoint <private> camera <private>' AND (mic): '-[MXCoreSession beginInterruption]: Session <ID: xx, PID = xyz,...'");
//init mic regex
// captures the pid from 'PID = <pid>'
micRegex = [NSRegularExpression regularExpressionWithPattern:@"PID = (\\d+)" options:0 error:nil];
//init cam regex
// captures the pid from '[{private}<pid>]'
cameraRegex = [NSRegularExpression regularExpressionWithPattern:@"\\[\\{private\\}(\\d+)\\]" options:0 error:nil];
//start log monitoring
[self.logMonitor start:[NSPredicate predicateWithFormat:@"subsystem=='com.apple.cmio' OR subsystem=='com.apple.coremedia'"] level:Log_Level_Default callback:^(OSLogEvent* logEvent) {
//sync to process
@synchronized (self) {
//match
NSTextCheckingResult* match = nil;
//pid
NSInteger pid = 0;
//camera:
// "added <private> endpoint <private> camera <private> = <pid>;"
if( (YES == [logEvent.subsystem isEqual:@"com.apple.cmio"]) &&
(YES == [logEvent.composedMessage hasSuffix:@"added <private> endpoint <private> camera <private>"]) )
{
//reset
self.lastCameraClient = 0;
//match on pid
match = [cameraRegex firstMatchInString:logEvent.composedMessage options:0 range:NSMakeRange(0, logEvent.composedMessage.length)];
if( (nil == match) ||
(NSNotFound == match.range.location) )
{
return;
}
//extract/convert pid
// ignore invalid (0 / -1) pids
pid = [[logEvent.composedMessage substringWithRange:[match rangeAtIndex:1]] integerValue];
if( (0 == pid) ||
(-1 == pid) )
{
return;
}
//save
// camera device listener picks this up (after its short delay)
self.lastCameraClient = pid;
}
//mic:
// "-[MXCoreSession beginInterruption]: Session <ID: xx, PID = xyz, ...":
else if( (YES == [logEvent.subsystem isEqual:@"com.apple.coremedia"]) &&
(YES == [logEvent.composedMessage hasPrefix:@"-MXCoreSession- -[MXCoreSession beginInterruption]"]) &&
(YES == [logEvent.composedMessage hasSuffix:@"Recording = YES> is going active"]) )
{
//reset
self.lastMicClient = 0;
//match on pid
match = [micRegex firstMatchInString:logEvent.composedMessage options:0 range:NSMakeRange(0, logEvent.composedMessage.length)];
if( (nil == match) ||
(NSNotFound == match.range.location) )
{
return;
}
//extract/convert pid
// ignore invalid (0 / -1) pids
pid = [[logEvent.composedMessage substringWithRange:[match rangeAtIndex:1]] integerValue];
if( (0 == pid) ||
(-1 == pid) )
{
return;
}
//save
// mic device listener picks this up (after its short delay)
self.lastMicClient = pid;
}
//msg not of interest
else
{
return;
}
}
}];
}
//macOS 13.3+
// use predicate: "subsystem=='com.apple.cmio'" looking for 'CMIOExtensionPropertyDeviceControlPID'
else if (@available(macOS 13.3, *)) {
//regex
NSRegularExpression* regex = nil;
//dbg msg
os_log_debug(logHandle, ">= macOS 13.3+: Using 'CMIOExtensionPropertyDeviceControlPID'");
//init regex
// captures the pid from '= <pid>;'
regex = [NSRegularExpression regularExpressionWithPattern:@"=\\s*(\\d+)\\s*;" options:0 error:nil];
//start logging
[self.logMonitor start:[NSPredicate predicateWithFormat:@"subsystem=='com.apple.cmio'"] level:Log_Level_Debug callback:^(OSLogEvent* logEvent) {
//match
NSTextCheckingResult* match = nil;
//pid
NSInteger pid = 0;
//sync to process
@synchronized (self) {
//only interested on "CMIOExtensionPropertyDeviceControlPID = <pid>;" msgs
if(YES != [logEvent.composedMessage containsString:@"CMIOExtensionPropertyDeviceControlPID = "])
{
return;
}
//reset
self.lastCameraClient = 0;
//match on pid
match = [regex firstMatchInString:logEvent.composedMessage options:0 range:NSMakeRange(0, logEvent.composedMessage.length)];
if( (nil == match) ||
(NSNotFound == match.range.location) )
{
return;
}
//extract/convert pid
// ignore invalid (0 / -1) pids
pid = [[logEvent.composedMessage substringWithRange:[match rangeAtIndex:1]] integerValue];
if( (0 == pid) ||
(-1 == pid) )
{
return;
}
//save
self.lastCameraClient = pid;
}
}];
}
//previous versions of macOS
// use predicate: "subsystem=='com.apple.SystemStatus'"
// parses full audio/camera attribution lists out of 'Server data changed...' msgs
else
{
//dbg msg
os_log_debug(logHandle, "< macOS 13.3+: Using 'com.apple.SystemStatus'");
//start logging
[self.logMonitor start:[NSPredicate predicateWithFormat:@"subsystem=='com.apple.SystemStatus'"] level:Log_Level_Default callback:^(OSLogEvent* logEvent) {
//sync to process
@synchronized (self) {
//flags
// track which attribution list subsequent lines belong to
BOOL audioAttributionsList = NO;
BOOL cameraAttributionsList = NO;
//new audio attributions
NSMutableArray* newAudioAttributions = nil;
//new camera attributions
NSMutableArray* newCameraAttributions = nil;
//only interested on "Server data changed..." msgs
if(YES != [logEvent.composedMessage containsString:@"Server data changed for media domain"])
{
return;
}
//split on newlines
// ...and then parse out audio/camera attributions
for(NSString* __strong line in [logEvent.composedMessage componentsSeparatedByString:@"\n"])
{
//pid
// note: pointer type, so the '0' initializer is nil
NSNumber* pid = 0;
//trim
line = [line stringByTrimmingCharactersInSet:[NSCharacterSet whitespaceCharacterSet]];
//'audioAttributions' list?
if( (YES == [line hasPrefix:@"audioAttributions = "]) ||
(YES == [line hasPrefix:@"audioRecordingAttributions = "]) )
{
//dbg msg
os_log_debug(logHandle, "found 'audio attributions'");
//set flag
audioAttributionsList = YES;
//init
newAudioAttributions = [NSMutableArray array];
//unset (other) list
cameraAttributionsList = NO;
//next
continue;
}
//'cameraAttributions' list?
if( (YES == [line hasPrefix:@"cameraAttributions = "]) ||
(YES == [line hasPrefix:@"cameraCaptureAttributions = "]) )
{
//dbg msg
os_log_debug(logHandle, "found 'camera attributions'");
//set flag
cameraAttributionsList = YES;
//init
newCameraAttributions = [NSMutableArray array];
//unset (other) list
audioAttributionsList = NO;
//next
continue;
}
//audit token of item?
if(YES == [line containsString:@"<BSAuditToken:"])
{
//dbg msg
os_log_debug(logHandle, "line has audit token...");
//pid extraction regex
NSRegularExpression* regex = nil;
//match
NSTextCheckingResult* match = nil;
//init regex
// captures digits following 'PID: '
regex = [NSRegularExpression regularExpressionWithPattern:@"(?<=PID: )[0-9]*" options:0 error:nil];
//match/extract pid
match = [regex firstMatchInString:line options:0 range:NSMakeRange(0, line.length)];
if( (nil == match) ||
(NSNotFound == match.range.location))
{
//dbg msg
os_log_debug(logHandle, "no match on regex");
//ignore
continue;
}
//extract pid
pid = @([[line substringWithRange:[match rangeAtIndex:0]] intValue]);
//dbg msg
os_log_debug(logHandle, "pid: %@", pid);
//in audio list?
if(YES == audioAttributionsList)
{
//dbg msg
os_log_debug(logHandle, "...for audio");
//add
[newAudioAttributions addObject:[NSNumber numberWithInt:[pid intValue]]];
}
//in camera list?
else if(YES == cameraAttributionsList)
{
//dbg msg
os_log_debug(logHandle, "...for camera");
//add
[newCameraAttributions addObject:[NSNumber numberWithInt:[pid intValue]]];
}
//next
continue;
}
}
//macOS 12: off events trigger the removal of the list
// so then we'll just pass in an empty list in that case
if(12 == NSProcessInfo.processInfo.operatingSystemVersion.majorVersion)
{
//nil?
if(nil == newAudioAttributions)
{
//init blank
newAudioAttributions = [NSMutableArray array];
}
//nil?
if(nil == newCameraAttributions)
{
//init blank
newCameraAttributions = [NSMutableArray array];
}
}
//process attributions
// diffs against the previous lists and generates on/off events
[self processAttributions:newAudioAttributions newCameraAttributions:newCameraAttributions];
}//sync
}];
}
return;
}
//process attributions
// diffs the new audio/camera attribution (pid) lists against the saved ones
// will generate (any needed) events to trigger alerts to user
-(void)processAttributions:(NSMutableArray*)newAudioAttributions newCameraAttributions:(NSMutableArray*)newCameraAttributions
{
//audio differences
NSOrderedCollectionDifference* audioDifferences = nil;
//camera differences
NSOrderedCollectionDifference* cameraDifferences = nil;
//client
__block Client* client = nil;
//event
__block Event* event = nil;
//dbg msg
os_log_debug(logHandle, "method '%s' invoked", __PRETTY_FUNCTION__);
//diff audio differences
// only when both old and new lists exist
if( (nil != newAudioAttributions) &&
(nil != self.audioAttributions) )
{
//diff
audioDifferences = [newAudioAttributions differenceFromArray:self.audioAttributions];
}
//diff camera differences
// only when both old and new lists exist
if( (nil != newCameraAttributions) &&
(nil != self.cameraAttributions) )
{
//diff
cameraDifferences = [newCameraAttributions differenceFromArray:self.cameraAttributions];
}
/* audio event logic */
//new audio event?
// handle (lookup mic, send event)
if(YES == audioDifferences.hasChanges)
{
//dbg msg
os_log_debug(logHandle, "new audio event");
//active mic
// NOTE(review): not reset per attribution iteration below, so the 'no mic
// found' fallback only fires while no active mic has been seen yet — confirm intended
AVCaptureDevice* activeMic = nil;
//audio off?
// send event
if(0 == audioDifferences.insertions.count)
{
//dbg msg
os_log_debug(logHandle, "audio event: off");
//init event
// process (client) and device are nil
event = [[Event alloc] init:nil device:nil deviceType:Device_Microphone state:NSControlStateValueOff];
//handle event
[self handleEvent:event];
}
//audio on?
// send event
else
{
//dbg msg
os_log_debug(logHandle, "audio event: on");
//send event for each process (attribution)
for(NSOrderedCollectionChange* audioAttribution in audioDifferences.insertions)
{
//init client from attribution
client = [[Client alloc] init];
client.pid = audioAttribution.object;
client.path = valueForStringItem(getProcessPath(client.pid.intValue));
client.name = valueForStringItem(getProcessName(client.path));
//look for active mic
// an event is sent per active mic found
for(AVCaptureDevice* microphone in [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio])
{
//off? skip
if(NSControlStateValueOn != [self getMicState:microphone])
{
//skip
continue;
}
//dbg msg
os_log_debug(logHandle, "audio device: %{public}@/%{public}@ is on", microphone.manufacturer, microphone.localizedName);
//save
activeMic = microphone;
//init event
// with client and (active) mic
event = [[Event alloc] init:client device:activeMic deviceType:Device_Microphone state:NSControlStateValueOn];
//handle event
[self handleEvent:event];
}
//no mic found? (e.g. headphones as input)
// show (limited) alert
if(nil == activeMic)
{
//init event
// device is nil
event = [[Event alloc] init:client device:nil deviceType:Device_Microphone state:NSControlStateValueOn];
//handle event
[self handleEvent:event];
}
}
}
} //audio event
/* camera event logic */
//new camera event?
// handle (lookup camera, send event)
if(YES == cameraDifferences.hasChanges)
{
//dbg msg
os_log_debug(logHandle, "new camera event");
//active camera
// NOTE(review): same persistence caveat as 'activeMic' above
AVCaptureDevice* activeCamera = nil;
//camera off?
// send event
if(0 == cameraDifferences.insertions.count)
{
//dbg msg
os_log_debug(logHandle, "camera event: off");
//init event
// process (client) and device are nil
event = [[Event alloc] init:nil device:nil deviceType:Device_Camera state:NSControlStateValueOff];
//handle event
[self handleEvent:event];
}
//camera on?
// send event
else
{
//dbg msg
os_log_debug(logHandle, "camera event: on");
//send event for each process (attribution)
for(NSOrderedCollectionChange* cameraAttribution in cameraDifferences.insertions)
{
//init client from attribution
client = [[Client alloc] init];
client.pid = cameraAttribution.object;
client.path = valueForStringItem(getProcessPath(client.pid.intValue));
client.name = valueForStringItem(getProcessName(client.path));
//look for active camera
// an event is sent per active (non-virtual) camera found
for(AVCaptureDevice* camera in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
{
//off? skip
if(NSControlStateValueOn != [self getCameraState:camera])
{
//skip
continue;
}
//virtual
// TODO: is there a better way to determine this?
if(YES == [camera.localizedName containsString:@"Virtual"])
{
//skip
continue;
}
//dbg msg
os_log_debug(logHandle, "camera device: %{public}@/%{public}@ is on", camera.manufacturer, camera.localizedName);
//save
activeCamera = camera;
//init event
// with client and (active) camera
event = [[Event alloc] init:client device:activeCamera deviceType:Device_Camera state:NSControlStateValueOn];
//handle event
[self handleEvent:event];
}
//no camera found?
// show (limited) alert
if(nil == activeCamera)
{
//init event
// device is nil
event = [[Event alloc] init:client device:nil deviceType:Device_Camera state:NSControlStateValueOn];
//handle event
[self handleEvent:event];
}
}
}
} //camera event
//update audio attributions
// (immutable) copy becomes the baseline for the next diff
self.audioAttributions = [newAudioAttributions copy];
//update camera attributions
// (immutable) copy becomes the baseline for the next diff
self.cameraAttributions = [newCameraAttributions copy];
return;
}
//register for audio changes
// adds a CoreAudio 'is running somewhere' property listener for the device;
// returns YES if the listener was registered
-(BOOL)watchAudioDevice:(AVCaptureDevice*)device
{
//ret var
BOOL bRegistered = NO;
//status var
OSStatus status = -1;
//device ID
AudioObjectID deviceID = 0;
//property struct
AudioObjectPropertyAddress propertyStruct = {0};
//init property struct's selector
propertyStruct.mSelector = kAudioDevicePropertyDeviceIsRunningSomewhere;
//init property struct's scope
propertyStruct.mScope = kAudioObjectPropertyScopeGlobal;
//init property struct's element
// NOTE(review): kAudioObjectPropertyElementMaster is deprecated in newer SDKs
// in favor of kAudioObjectPropertyElementMain (same value) — confirm SDK target
propertyStruct.mElement = kAudioObjectPropertyElementMaster;
//block
// invoked when audio changes
AudioObjectPropertyListenerBlock listenerBlock = ^(UInt32 inNumberAddresses, const AudioObjectPropertyAddress *inAddresses)
{
//state
NSInteger state = -1;
//event
__block Event* event = nil;
//get state
state = [self getMicState:device];
//dbg msg
os_log_debug(logHandle, "Mic: %{public}@ changed state to %ld", device.localizedName, (long)state);
//save last mic off
// used by the 'no external devices' notification logic
if(NSControlStateValueOff == state)
{
//save
self.lastMicOff = device;
}
//macOS 13.3+
// use this as trigger
// older versions send events via the log monitor instead
if (@available(macOS 13.3, *)) {
//dbg msg
os_log_debug(logHandle, "new audio event");
//audio off?
if(NSControlStateValueOff == state)
{
//dbg msg
os_log_debug(logHandle, "audio event: off");
//init event
// process (client) is nil; device is this mic
event = [[Event alloc] init:nil device:device deviceType:Device_Microphone state:NSControlStateValueOff];
//handle event
[self handleEvent:event];
}
//audio on?
else if(NSControlStateValueOn == state)
{
//dbg msg
os_log_debug(logHandle, "audio event: on");
//delay
// need time for logging to grab responsible process
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 0.5 * NSEC_PER_SEC), dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
//client
Client* client = nil;
//have client?
// (pid was stashed by the log monitor)
if(0 != self.lastMicClient)
{
//init client from attribution
client = [[Client alloc] init];
client.pid = [NSNumber numberWithInteger:self.lastMicClient];
client.path = valueForStringItem(getProcessPath(client.pid.intValue));
client.name = valueForStringItem(getProcessName(client.path));
}
//init event
// with client (may be nil) and this mic
event = [[Event alloc] init:client device:device deviceType:Device_Microphone state:NSControlStateValueOn];
//handle event
[self handleEvent:event];
});
}
} //macOS 13.3+
};
//get device ID
// 0 means lookup failed
deviceID = [self getAVObjectID:device];
if(0 == deviceID)
{
//err msg
os_log_error(logHandle, "ERROR: 'failed to find %{public}@'s object id", device.localizedName);
//bail
goto bail;
}
//add property listener for audio changes
// delivered on our (concurrent) event queue
status = AudioObjectAddPropertyListenerBlock(deviceID, &propertyStruct, self.eventQueue, listenerBlock);
if(noErr != status)
{
//err msg
os_log_error(logHandle, "ERROR: AudioObjectAddPropertyListenerBlock() failed with %d", status);
//bail
goto bail;
}
//save
// keyed by unique ID so it can be removed on device disconnect
self.audioListeners[device.uniqueID] = listenerBlock;
//dbg msg
os_log_debug(logHandle, "monitoring %{public}@ (uuid: %{public}@ / %x) for audio changes", device.localizedName, device.uniqueID, deviceID);
//happy
bRegistered = YES;
bail:
return bRegistered;
}
//register for video changes
// adds a CMIO 'is running somewhere' property listener for the device;
// returns YES if the listener was registered
-(BOOL)watchVideoDevice:(AVCaptureDevice*)device
{
//ret var
BOOL bRegistered = NO;
//status var
OSStatus status = -1;
//device id
CMIOObjectID deviceID = 0;
//property struct
CMIOObjectPropertyAddress propertyStruct = {0};
//init property struct's selector
// NOTE(review): this is the CoreAudio constant used as the CMIO selector —
// appears intentional (shared four-char code), but confirm
propertyStruct.mSelector = kAudioDevicePropertyDeviceIsRunningSomewhere;
//init property struct's scope
propertyStruct.mScope = kAudioObjectPropertyScopeGlobal;
//init property struct's element
propertyStruct.mElement = kAudioObjectPropertyElementMaster;
//block
// invoked when video changes
CMIOObjectPropertyListenerBlock listenerBlock = ^(UInt32 inNumberAddresses, const CMIOObjectPropertyAddress addresses[])
{
//state
NSInteger state = -1;
//get state
state = [self getCameraState:device];
//dbg msg
os_log_debug(logHandle, "Camera: %{public}@ changed state to %ld", device.localizedName, (long)state);
//save last camera off
// used by the 'no external devices' notification logic
if(NSControlStateValueOff == state)
{
//save
self.lastCameraOff = device;
}
//camera on?
// macOS 13.3+, use this as trigger
// older versions send events via the log monitor instead
if (@available(macOS 13.3, *)) {
//event
__block Event* event = nil;
//dbg msg
os_log_debug(logHandle, "new camera event");
//camera: on
if(NSControlStateValueOn == state)
{
//dbg msg
os_log_debug(logHandle, "camera event: on");
//delay
// need time for logging to grab responsible process
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 0.5 * NSEC_PER_SEC), dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
//client
Client* client = nil;
//have a client?
// (pid was stashed by the log monitor)
if(0 != self.lastCameraClient)
{
//init client from attribution
client = [[Client alloc] init];
client.pid = [NSNumber numberWithInteger:self.lastCameraClient];
client.path = valueForStringItem(getProcessPath(client.pid.intValue));
client.name = valueForStringItem(getProcessName(client.path));
}
//init event
// with client (may be nil) and this camera
event = [[Event alloc] init:client device:device deviceType:Device_Camera state:NSControlStateValueOn];
//handle event
[self handleEvent:event];
});
}
//camera: off
else if(NSControlStateValueOff == state)
{
//dbg msg
os_log_debug(logHandle, "camera event: off");
//init event
// process (client) is nil; device is this camera
event = [[Event alloc] init:nil device:device deviceType:Device_Camera state:NSControlStateValueOff];
//handle event
[self handleEvent:event];
}
} //macOS 13.3
};
//get device ID
// 0 means lookup failed
deviceID = [self getAVObjectID:device];
if(0 == deviceID)
{
//err msg
os_log_error(logHandle, "ERROR: 'failed to find %{public}@'s object id", device.localizedName);
//bail
goto bail;
}
//register (add) property block listener
// delivered on our (concurrent) event queue
status = CMIOObjectAddPropertyListenerBlock(deviceID, &propertyStruct, self.eventQueue, listenerBlock);
if(noErr != status)
{
//err msg
os_log_error(logHandle, "ERROR: CMIOObjectAddPropertyListenerBlock() failed with %d", status);
//bail
goto bail;
}
//save
// keyed by unique ID so it can be removed on device disconnect
self.cameraListeners[device.uniqueID] = listenerBlock;
//dbg msg
os_log_debug(logHandle, "monitoring %{public}@ (uuid: %{public}@ / %x) for video changes", device.localizedName, device.uniqueID, deviceID);
//happy
bRegistered = YES;
bail:
return bRegistered;
}
//enumerate active devices
// returns all cameras (excluding virtual ones) and mics that are currently in use
-(NSMutableArray*)enumerateActiveDevices
{
    //devices found to be active
    NSMutableArray* activeDevices = [NSMutableArray array];
    
    //check each (video) camera
    for(AVCaptureDevice* camera in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
    {
        //ignore virtual devices (e.g. OBS virtual camera)
        // TODO: is there a better way to determine this?
        if([camera.localizedName containsString:@"Virtual"])
        {
            continue;
        }
        
        //active?
        // add to list
        if(NSControlStateValueOn == [self getCameraState:camera])
        {
            [activeDevices addObject:camera];
        }
    }
    
    //check each (audio) mic
    for(AVCaptureDevice* mic in [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio])
    {
        //active?
        // add to list
        if(NSControlStateValueOn == [self getMicState:mic])
        {
            [activeDevices addObject:mic];
        }
    }
    
    return activeDevices;
}
//get built-in mic
// looks for an Apple device identified as the built-in microphone
// (by unique ID or localized name); falls back to the default audio device
-(AVCaptureDevice*)findBuiltInMic
{
    //built-in mic (once found)
    AVCaptureDevice* builtInMic = nil;
    
    //scan all audio input devices
    // built-in mic appears as "BuiltInMicrophoneDevice"
    for(AVCaptureDevice* candidate in [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio])
    {
        //dbg msg
        os_log_debug(logHandle, "device: %{public}@/%{public}@/%{public}@", candidate.manufacturer, candidate.localizedName, candidate.uniqueID);
        
        //skip non-Apple devices
        if(![candidate.manufacturer isEqualToString:@"Apple Inc."])
        {
            continue;
        }
        
        //built-in mic?
        // match on unique ID or localized name
        if( [candidate.uniqueID isEqualToString:@"BuiltInMicrophoneDevice"] ||
            [candidate.localizedName isEqualToString:@"Built-in Microphone"] )
        {
            //found
            builtInMic = candidate;
            break;
        }
    }
    
    //none found?
    // fall back to the system's default audio device
    if(nil == builtInMic)
    {
        //grab default
        builtInMic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        
        //dbg msg
        os_log_debug(logHandle, "Apple Mic not found, defaulting to default device: %{public}@/%{public}@", builtInMic.manufacturer, builtInMic.localizedName);
    }
    
    return builtInMic;
}
//get built-in camera
// looks for an Apple device identified as the FaceTime HD camera
// (by unique ID or localized name); falls back to the default video device
-(AVCaptureDevice*)findBuiltInCamera
{
    //camera
    AVCaptureDevice* builtInCamera = nil;
    
    //scan all video input devices
    // built-in camera appears as "FaceTime HD Camera"
    // (fixed copy-pasted comment which referenced the built-in mic)
    for(AVCaptureDevice* currentCamera in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
    {
        //dbg msg
        os_log_debug(logHandle, "device: %{public}@/%{public}@/%{public}@", currentCamera.manufacturer, currentCamera.localizedName, currentCamera.uniqueID);
        
        //is device apple + built in camera?
        if(YES == [currentCamera.manufacturer isEqualToString:@"Apple Inc."])
        {
            //is built in camera?
            // match on unique ID or (either spelling of the) localized name
            if( (YES == [currentCamera.uniqueID isEqualToString:@"FaceTime HD Camera"]) ||
                (YES == [currentCamera.localizedName isEqualToString:@"FaceTime-HD-camera"]) ||
                (YES == [currentCamera.localizedName isEqualToString:@"FaceTime HD Camera"]) )
            {
                //found
                builtInCamera = currentCamera;
                break;
            }
        }
    }
    
    //not found?
    // grab the system's default video device
    if(nil == builtInCamera)
    {
        //get default camera
        builtInCamera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        
        //dbg msg
        // note: removed stray unbalanced ')' from the original format string
        os_log_debug(logHandle, "Apple Camera not found, defaulting to default device: %{public}@/%{public}@", builtInCamera.manufacturer, builtInCamera.localizedName);
    }
    
    return builtInCamera;
}
//get av object's ID
// invokes AVCaptureDevice's (private) 'connectionID' selector to map the
// device to its CoreAudio/CMIO object ID; returns 0 on failure
-(UInt32)getAVObjectID:(AVCaptureDevice*)device
{
    //selector for the private 'connectionID' method
    SEL connectionIDSelector = NSSelectorFromString(@"connectionID");
    
    //device doesn't respond?
    // 0 is the 'invalid object ID' sentinel
    if(YES != [device respondsToSelector:connectionIDSelector])
    {
        return 0;
    }
    
    //suppress warnings, as we're invoking a private selector
    // that returns a non-object (integral) value
    #pragma clang diagnostic push
    #pragma clang diagnostic ignored "-Wpointer-to-int-cast"
    #pragma clang diagnostic ignored "-Warc-performSelector-leaks"
    
    //grab connection ID
    UInt32 objectID = (UInt32)[device performSelector:connectionIDSelector withObject:nil];
    
    //restore warnings
    #pragma clang diagnostic pop
    
    return objectID;
}
//determine if audio device is active
// queries CoreAudio's 'kAudioDevicePropertyDeviceIsRunningSomewhere' for the device
// NOTE(review): return type is UInt32, but -1 is assigned as the error sentinel,
// which wraps to 0xFFFFFFFF; callers only test equality against
// NSControlStateValueOn/Off so this works, but is fragile — confirm intended
-(UInt32)getMicState:(AVCaptureDevice*)device
{
//status var
OSStatus status = -1;
//device ID
AudioObjectID deviceID = 0;
//running flag
UInt32 isRunning = 0;
//size of query flag
UInt32 propertySize = 0;
//get device ID
// 0 means lookup failed
deviceID = [self getAVObjectID:device];
if(0 == deviceID)
{
//err msg
os_log_error(logHandle, "ERROR: 'failed to find %{public}@'s object id", device.localizedName);
//set error
isRunning = -1;
//bail
goto bail;
}
//init size
propertySize = sizeof(isRunning);
//query to get 'kAudioDevicePropertyDeviceIsRunningSomewhere' status
// NOTE(review): AudioDeviceGetProperty is a long-deprecated CoreAudio API — confirm still intended
status = AudioDeviceGetProperty(deviceID, 0, false, kAudioDevicePropertyDeviceIsRunningSomewhere, &propertySize, &isRunning);
if(noErr != status)
{
//err msg
os_log_error(logHandle, "ERROR: failed to get run state for %{public}@ (error: %#x)", device.localizedName, status);
//set error
isRunning = -1;
//bail
goto bail;
}
bail:
return isRunning;
}
//check if a specified video is active
// queries the CMIO device's 'is running somewhere' property
// note: on M1 this sometimes always says 'on' (smh apple)
// NOTE(review): like getMicState:, the -1 error sentinel wraps the UInt32 return
-(UInt32)getCameraState:(AVCaptureDevice*)device
{
//status var
OSStatus status = -1;
//device ID
CMIODeviceID deviceID = 0;
//running flag
UInt32 isRunning = 0;
//size of query flag
UInt32 propertySize = 0;
//property address struct
CMIOObjectPropertyAddress propertyStruct = {0};
//get device ID
// 0 means lookup failed
deviceID = [self getAVObjectID:device];
if(0 == deviceID)
{
//err msg
os_log_error(logHandle, "ERROR: 'failed to find %{public}@'s object id", device.localizedName);
//set error
isRunning = -1;
//bail
goto bail;
}
//init size
propertySize = sizeof(isRunning);
//init property struct's selector
// note: CoreAudio constant reused as the CMIO selector
propertyStruct.mSelector = kAudioDevicePropertyDeviceIsRunningSomewhere;
//init property struct's scope
propertyStruct.mScope = kCMIOObjectPropertyScopeGlobal;
//init property struct's element
propertyStruct.mElement = kAudioObjectPropertyElementMaster;
//query to get 'kAudioDevicePropertyDeviceIsRunningSomewhere' status
// NOTE(review): the dataSize arg is sizeof(the selector constant) — 4 bytes,
// which happens to equal sizeof(isRunning); presumably sizeof(isRunning) was meant
status = CMIOObjectGetPropertyData(deviceID, &propertyStruct, 0, NULL, sizeof(kAudioDevicePropertyDeviceIsRunningSomewhere), &propertySize, &isRunning);
if(noErr != status)
{
//err msg
os_log_error(logHandle, "ERROR: failed to get run state for %{public}@ (error: %#x)", device.localizedName, status);
//set error
isRunning = -1;
//bail
goto bail;
}
bail:
return isRunning;
}
//should an event be shown?
// consults preferences, external-device filters, duplicate/spurious suppression,
// and the user's 'allowed items' list; returns a NOTIFICATION_* result code
// side effect: always records the event as the device's last event (at 'bail')
-(NSUInteger)shouldShowNotification:(Event*)event
{
//result
NSUInteger result = NOTIFICATION_ERROR;
//device ID
NSNumber* deviceID = 0;
//device's last event
Event* deviceLastEvent = nil;
//get device ID
// nil device (e.g. global 'off' events) yields an object ID of 0
deviceID = [NSNumber numberWithInt:[self getAVObjectID:event.device]];
//NOTE(review): this compares the NSNumber *pointer* to nil, but numberWithInt:
// never returns nil, so this error path is unreachable — even when the object
// ID lookup returned 0; likely meant 'deviceID.intValue', though "fixing" it
// would make nil-device (off) events bail here, so confirm desired behavior first
if(0 == deviceID)
{
//err msg
os_log_error(logHandle, "ERROR: 'failed to find %{public}@'s object id", event.device.localizedName);
//bail
goto bail;
}
//extract its last event
deviceLastEvent = self.deviceEvents[deviceID];
//disabled?
// really shouldn't ever get here, but can't hurt to check
if(YES == [NSUserDefaults.standardUserDefaults boolForKey:PREF_IS_DISABLED])
{
//set result
result = NOTIFICATION_SPURIOUS;
//dbg msg
os_log_debug(logHandle, "disable is set, so ignoring event");
//bail
goto bail;
}
//inactive alerting off?
// ignore if event is an inactive/off
if( (NSControlStateValueOff == event.state) &&
(YES == [NSUserDefaults.standardUserDefaults boolForKey:PREF_DISABLE_INACTIVE]))
{
//set result
result = NOTIFICATION_SKIPPED;
//dbg msg
os_log_debug(logHandle, "disable inactive alerts set, so ignoring inactive/off event");
//bail
goto bail;
}
//no external devices mode?
if(YES == [NSUserDefaults.standardUserDefaults boolForKey:PREF_NO_EXTERNAL_DEVICES_MODE])
{
//on?
// we have the device directly
if(NSControlStateValueOn == event.state)
{
//external device?
// don't show notification
if( (YES != [self.builtInMic.uniqueID isEqualToString:event.device.uniqueID]) &&
(YES != [self.builtInCamera.uniqueID isEqualToString:event.device.uniqueID]) )
{
//set result
result = NOTIFICATION_SKIPPED;
//dbg msg
// note: 'ingore' typo is in the original log string
os_log_debug(logHandle, "ingore external devices is set, so ignoring external device 'on' event");
//bail
goto bail;
}
}
//off
// event has no device, so check last device that turned off
else
{
//mic
// check last mic off device
if( (Device_Microphone == event.deviceType) &&
(nil != self.lastMicOff) &&
(YES != [self.builtInMic.uniqueID isEqualToString:self.lastMicOff.uniqueID]) )
{
//set result
result = NOTIFICATION_SKIPPED;
//dbg msg
os_log_debug(logHandle, "ingore external devices is set, so ignoring external mic 'off' event");
//bail
goto bail;
}
//camera
// check last camera off device
if( (Device_Camera == event.deviceType) &&
(nil != self.lastCameraOff) &&
(YES != [self.builtInCamera.uniqueID isEqualToString:self.lastCameraOff.uniqueID]) )
{
//set result
result = NOTIFICATION_SKIPPED;
//dbg msg
os_log_debug(logHandle, "ingore external devices is set, so ignoring external camera 'off' event");
//bail
goto bail;
}
}
} //PREF_NO_EXTERNAL_DEVICES_MODE
//macOS sometimes toggles / delivers 2x events for same device
if(deviceLastEvent.deviceType == event.deviceType)
{
//ignore if last event was < 1.0s ago
if([event.timestamp timeIntervalSinceDate:deviceLastEvent.timestamp] < 1.0f)
{
//set result
result = NOTIFICATION_SPURIOUS;
//dbg msg
os_log_debug(logHandle, "ignoring event, as it happened <1.0s ago");
//bail
goto bail;
}
//ignore if last event was same state
// NOTE(review): unreachable — the previous check already bails whenever the
// events are <1.0s apart, regardless of state
if( (deviceLastEvent.state == event.state) &&
([event.timestamp timeIntervalSinceDate:deviceLastEvent.timestamp] < 1.0f) )
{
//set result
result = NOTIFICATION_SPURIOUS;
//dbg msg
os_log_debug(logHandle, "ignoring event as it was same state as last (%ld), and happened <1.0s ago", (long)event.state);
//bail
goto bail;
}
} //same device
//client provided?
// check if its allowed
if(nil != event.client)
{
//match is simply: device and path
for(NSDictionary* allowedItem in [NSUserDefaults.standardUserDefaults objectForKey:PREFS_ALLOWED_ITEMS])
{
//match?
if( ([allowedItem[EVENT_DEVICE] intValue] == event.deviceType) &&
(YES == [allowedItem[EVENT_PROCESS_PATH] isEqualToString:event.client.path]) )
{
//set result
result = NOTIFICATION_SKIPPED;
//dbg msg
os_log_debug(logHandle, "%{public}@ is allowed to access %d, so no notification will not be shown", event.client.path, event.deviceType);
//done
goto bail;
}
}
}
//set result
result = NOTIFICATION_DELIVER;
bail:
//(always) update last event
self.deviceEvents[deviceID] = event;
return result;
}
//handle an event
// show alert / exec user action
-(void)handleEvent:(Event*)event
{
    //notification decision
    NSInteger decision = NOTIFICATION_ERROR;
    
    //determine (synchronized, as state such as last events is shared) if a notification should be shown
    @synchronized (self) {
        
        //check
        decision = [self shouldShowNotification:event];
    }
    
    //dbg msg
    os_log_debug(logHandle, "'shouldShowNotification:' method returned %ld", (long)decision);
    
    //deliver to user?
    if(NOTIFICATION_DELIVER == decision)
    {
        //show
        [self showNotification:event];
    }
    
    //also run the user-specified action?
    // skipped for error/spurious events, and when no action path is configured
    if( (NOTIFICATION_ERROR != decision) &&
        (NOTIFICATION_SPURIOUS != decision) &&
        (0 != [[NSUserDefaults.standardUserDefaults objectForKey:PREF_EXECUTE_PATH] length]) )
    {
        //exec
        [self executeUserAction:event];
    }
    
    return;
}
//build and display notification
// title/subtitle/body are based on the event's device, state, and (optional) client
-(void)showNotification:(Event*)event
{
    //notification content
    UNMutableNotificationContent* content = nil;
    
    //notification request
    UNNotificationRequest* request = nil;
    
    //title
    NSMutableString* title = nil;
    
    //alloc content
    content = [[UNMutableNotificationContent alloc] init];
    
    //set (default) category
    // just a 'close' category; upgraded to 'action' below if we have a client
    content.categoryIdentifier = CATEGORY_CLOSE;
    
    //alloc title
    title = [NSMutableString string];
    
    //add device type
    // note: was 'appendFormat:' for the mic case; 'appendString:' is the correct API for a plain literal
    (Device_Camera == event.deviceType) ? [title appendString:@"📸"] : [title appendString:@"🎙️"];
    
    //add status
    (NSControlStateValueOn == event.state) ? [title appendString:@" Became Active!"] : [title appendString:@" Became Inactive."];
    
    //set title
    content.title = title;
    
    //sub-title
    // device name
    if(nil != event.device)
    {
        //set
        content.subtitle = [NSString stringWithFormat:@"%@", event.device.localizedName];
    }
    
    //have client?
    // use as body, and enable per-process ('allow'/'block') actions
    if(nil != event.client)
    {
        //set body
        content.body = [NSString stringWithFormat:@"Process: %@ (%@)", event.client.name, (0 != event.client.pid.intValue) ? event.client.pid : @"pid: unknown"];
        
        //set category
        content.categoryIdentifier = CATEGORY_ACTION;
        
        //set user info
        // consumed by the 'didReceiveNotificationResponse:' delegate callback
        content.userInfo = @{EVENT_DEVICE:@(event.deviceType), EVENT_PROCESS_ID:event.client.pid, EVENT_PROCESS_PATH:event.client.path};
    }
    //no client
    // just use device name as body
    else if(nil != event.device)
    {
        //set body
        content.body = [NSString stringWithFormat:@"Device: %@", event.device.localizedName];
    }
    
    //init request
    // nil trigger means deliver immediately
    request = [UNNotificationRequest requestWithIdentifier:NSUUID.UUID.UUIDString content:content trigger:nil];
    
    //send notification
    [UNUserNotificationCenter.currentNotificationCenter addNotificationRequest:request withCompletionHandler:^(NSError *_Nullable error)
    {
        //error?
        if(nil != error)
        {
            //err msg
            os_log_error(logHandle, "ERROR failed to deliver notification (error: %@)", error);
        }
    }];
    
    return;
}
//execute user action
// via the shell to handle binaries and scripts
// returns YES if the action was launched, NO otherwise
-(BOOL)executeUserAction:(Event*)event
{
    //flag
    BOOL wasExecuted = NO;
    
    //path to action
    NSString* action = nil;
    
    //args
    NSMutableString* args = nil;
    
    //dbg msg
    os_log_debug(logHandle, "executing user action");
    
    //alloc
    args = [NSMutableString string];
    
    //grab action
    action = [NSUserDefaults.standardUserDefaults objectForKey:PREF_EXECUTE_PATH];
    if(YES != [NSFileManager.defaultManager fileExistsAtPath:action])
    {
        //err msg
        os_log_error(logHandle, "ERROR: action %{public}@, does not exist", action);
        
        //bail
        goto bail;
    }
    
    //pass args?
    if(YES == [NSUserDefaults.standardUserDefaults boolForKey:PREF_EXECUTE_ACTION_ARGS])
    {
        //add device
        [args appendString:@"-device "];
        (Device_Camera == event.deviceType) ? [args appendString:@"camera"] : [args appendString:@"microphone"];
        
        //add event (state)
        [args appendString:@" -event "];
        (NSControlStateValueOn == event.state) ? [args appendString:@"on"] : [args appendString:@"off"];
        
        //add process (pid)
        if(nil != event.client)
        {
            //add
            [args appendString:@" -process "];
            [args appendString:event.client.pid.stringValue];
        }
        
        //add active device count
        [args appendString:@" -activeCount "];
        [args appendFormat:@"%lu", [self enumerateActiveDevices].count];
    }
    
    //exec user specified action
    // note: action path is quoted; args are built internally (not from untrusted input)
    execTask(SHELL, @[@"-c", [NSString stringWithFormat:@"\"%@\" %@", action, args]], NO, NO);
    
    //happy
    // fix: flag was never set, so the method always (incorrectly) returned NO
    wasExecuted = YES;
    
bail:
    
    return wasExecuted;
}
//stop monitor
// tears down log monitoring, all device listeners, and (dis)connection observers
-(void)stop
{
    //dbg msg
    os_log_debug(logHandle, "stopping log monitor");
    
    //stop log monitoring
    [self.logMonitor stop];
    
    //dbg msg
    os_log_debug(logHandle, "stopping audio monitor(s)");
    
    //unwatch each input audio (mic) device
    for(AVCaptureDevice* device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio])
    {
        //unwatch
        [self unwatchAudioDevice:device];
    }
    
    //dbg msg
    os_log_debug(logHandle, "stopping video monitor(s)");
    
    //unwatch each input video (cam) device
    for(AVCaptureDevice* device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
    {
        //unwatch
        [self unwatchVideoDevice:device];
    }
    
    //dbg msg
    os_log_debug(logHandle, "unregistering notifications");
    
    //remove both (dis)connection observers
    [NSNotificationCenter.defaultCenter removeObserver:self name:AVCaptureDeviceWasConnectedNotification object:nil];
    [NSNotificationCenter.defaultCenter removeObserver:self name:AVCaptureDeviceWasDisconnectedNotification object:nil];
    
    //dbg msg
    os_log_debug(logHandle, "all stopped...");
    
    return;
}
//stop audio monitor
// removes the 'is running somewhere' property listener block from the (mic) device
-(void)unwatchAudioDevice:(AVCaptureDevice*)device
{
    //status
    OSStatus status = -1;
    
    //device ID
    AudioObjectID deviceID = 0;
    
    //property struct
    AudioObjectPropertyAddress propertyStruct = {0};
    
    //bail if device was disconnected
    // fix: check was inverted (bailed when *connected*), so listeners were never removed
    if(YES != device.isConnected)
    {
        //bail
        goto bail;
    }
    
    //get device ID
    deviceID = [self getAVObjectID:device];
    if(0 == deviceID)
    {
        //err msg
        os_log_error(logHandle, "ERROR: 'failed to find %{public}@'s object id", device.localizedName);
        
        //bail
        goto bail;
    }
    
    //init property struct's selector
    propertyStruct.mSelector = kAudioDevicePropertyDeviceIsRunningSomewhere;
    
    //init property struct's scope
    propertyStruct.mScope = kAudioObjectPropertyScopeGlobal;
    
    //init property struct's element
    propertyStruct.mElement = kAudioObjectPropertyElementMaster;
    
    //remove listener block
    status = AudioObjectRemovePropertyListenerBlock(deviceID, &propertyStruct, self.eventQueue, self.audioListeners[device.uniqueID]);
    if(noErr != status)
    {
        //err msg
        os_log_error(logHandle, "ERROR: 'AudioObjectRemovePropertyListenerBlock' failed with %d", status);
        
        //bail
        goto bail;
    }
    
    //dbg msg
    os_log_debug(logHandle, "stopped monitoring %{public}@ (uuid: %{public}@ / %x) for audio changes", device.localizedName, device.uniqueID, deviceID);
    
bail:
    
    //always unset listener block
    // (consistent with 'unwatchVideoDevice:'; avoids holding a stale block on error)
    self.audioListeners[device.uniqueID] = nil;
    
    return;
}
//stop video monitor
// removes the 'is running somewhere' property listener block from the (camera) device
-(void)unwatchVideoDevice:(AVCaptureDevice*)device
{
    //status
    OSStatus status = -1;
    
    //device id
    CMIOObjectID deviceID = 0;
    
    //property struct
    CMIOObjectPropertyAddress propertyStruct = {0};
    
    //bail if device was disconnected
    // fix: check was inverted (bailed when *connected*), so listeners were never removed
    if(YES != device.isConnected)
    {
        //bail
        goto bail;
    }
    
    //get device ID
    deviceID = [self getAVObjectID:device];
    if(0 == deviceID)
    {
        //err msg
        os_log_error(logHandle, "ERROR: 'failed to find %{public}@'s object id", device.localizedName);
        
        //bail
        goto bail;
    }
    
    //init property struct's selector
    propertyStruct.mSelector = kAudioDevicePropertyDeviceIsRunningSomewhere;
    
    //init property struct's scope
    propertyStruct.mScope = kAudioObjectPropertyScopeGlobal;
    
    //init property struct's element
    propertyStruct.mElement = kAudioObjectPropertyElementMaster;
    
    //remove listener block
    status = CMIOObjectRemovePropertyListenerBlock(deviceID, &propertyStruct, self.eventQueue, self.cameraListeners[device.uniqueID]);
    if(noErr != status)
    {
        //err msg
        // fix: previously (incorrectly) named the audio API in the message
        os_log_error(logHandle, "ERROR: 'CMIOObjectRemovePropertyListenerBlock' failed with %d", status);
        
        //bail
        goto bail;
    }
    
    //dbg msg
    os_log_debug(logHandle, "stopped monitoring %{public}@ (uuid: %{public}@ / %x) for video changes", device.localizedName, device.uniqueID, deviceID);
    
bail:
    
    //always unset listener block
    self.cameraListeners[device.uniqueID] = nil;
    
    return;
}
#pragma mark UNNotificationCenter Delegate Methods

//invoked when a notification is about to fire while the app is frontmost
// tell the system to display it anyway (as an alert)
- (void)userNotificationCenter:(UNUserNotificationCenter *)center willPresentNotification:(UNNotification *)notification withCompletionHandler:(void (^)(UNNotificationPresentationOptions options))completionHandler {
    
    //always present
    completionHandler(UNNotificationPresentationOptionAlert);
}
//handle user response to notification
// 'Allow Always' persists an allow rule; 'Block' kills the offending process
-(void)userNotificationCenter:(UNUserNotificationCenter *)center didReceiveNotificationResponse:(UNNotificationResponse *)response withCompletionHandler:(void (^)(void))completionHandler {
    
    //allowed items
    NSMutableArray* allowedItems = nil;
    
    //device
    NSNumber* device = nil;
    
    //process path
    NSString* processPath = nil;
    
    //process name
    NSString* processName = nil;
    
    //process id
    NSNumber* processID = nil;
    
    //error
    // holds 'kill's return value
    int error = 0;
    
    //dbg msg
    //os_log_debug(logHandle, "user response to notification: %{public}@", response);
    
    //extract device
    device = response.notification.request.content.userInfo[EVENT_DEVICE];
    
    //extract process path
    processPath = response.notification.request.content.userInfo[EVENT_PROCESS_PATH];
    
    //extract process id
    processID = response.notification.request.content.userInfo[EVENT_PROCESS_ID];
    
    //get process name
    processName = valueForStringItem(getProcessName(processPath));
    
    //default action
    // set/save for logic in 'applicationShouldHandleReopen' ...which gets called :|
    if(YES == [response.actionIdentifier isEqualToString:@"com.apple.UNNotificationDefaultActionIdentifier"])
    {
        //dbg msg
        os_log_debug(logHandle, "user click triggered 'com.apple.UNNotificationDefaultActionIdentifier'");
        
        //save
        self.lastNotificationDefaultAction = [NSDate date];
        
        //done
        goto bail;
    }
    
    //close?
    // nothing to do
    if(YES == [response.notification.request.content.categoryIdentifier isEqualToString:CATEGORY_CLOSE])
    {
        //dbg msg
        os_log_debug(logHandle, "user clicked 'Ok'");
        
        //done
        goto bail;
    }
    
    //allow (once)?
    // really nothing to do
    else if(YES == [response.actionIdentifier isEqualToString:@"Allow"])
    {
        //dbg msg
        os_log_debug(logHandle, "user clicked 'Allow'");
        
        //done
        goto bail;
    }
    
    //always allow?
    // added to 'allowed' items
    else if(YES == [response.actionIdentifier isEqualToString:@"AllowAlways"])
    {
        //dbg msg
        os_log_debug(logHandle, "user clicked 'Allow Always'");
        
        //sanity check
        // need both path and device, as the dictionary literal below throws on nil values
        if( (nil == processPath) ||
            (nil == device) )
        {
            //err msg
            os_log_error(logHandle, "ERROR: notification was missing process path and/or device");
            
            //bail
            goto bail;
        }
        
        //load allowed items
        allowedItems = [[NSUserDefaults.standardUserDefaults objectForKey:PREFS_ALLOWED_ITEMS] mutableCopy];
        if(nil == allowedItems)
        {
            //alloc
            allowedItems = [NSMutableArray array];
        }
        
        //add item
        [allowedItems addObject:@{EVENT_PROCESS_PATH:processPath, EVENT_DEVICE:device}];
        
        //save & sync
        [NSUserDefaults.standardUserDefaults setObject:allowedItems forKey:PREFS_ALLOWED_ITEMS];
        [NSUserDefaults.standardUserDefaults synchronize];
        
        //dbg msg
        os_log_debug(logHandle, "added %{public}@ to list of allowed items", processPath);
        
        //broadcast
        [[NSNotificationCenter defaultCenter] postNotificationName:RULES_CHANGED object:nil userInfo:nil];
        
        //done
        goto bail;
    }
    
    //block?
    // kill process
    else if(YES == [response.actionIdentifier isEqualToString:@"Block"])
    {
        //dbg msg
        os_log_debug(logHandle, "user clicked 'Block'");
        
        //sanity check pid
        // fix: a nil/zero pid would make kill(0, SIGKILL) signal our whole process group!
        if(0 >= processID.intValue)
        {
            //err msg
            os_log_error(logHandle, "ERROR: notification was missing (valid) process id");
            
            //bail
            goto bail;
        }
        
        //kill
        error = kill(processID.intValue, SIGKILL);
        if(0 != error)
        {
            //err msg
            os_log_error(logHandle, "ERROR: failed to kill %@ (%@)", processName, processID);
            
            //show an alert
            showAlert([NSString stringWithFormat:@"ERROR: failed to terminate %@ (%@)", processName, processID], [NSString stringWithFormat:@"system error code: %d", error], @"OK");
            
            //bail
            goto bail;
        }
        
        //dbg msg
        os_log_debug(logHandle, "killed %@ (%@)", processName, processID);
    }
    
bail:
    
    //gotta call
    completionHandler();
    
    return;
}
@end