diff --git a/ManagedCapturer/SCCaptureBaseState.h b/ManagedCapturer/SCCaptureBaseState.h new file mode 100644 index 0000000..ef18f00 --- /dev/null +++ b/ManagedCapturer/SCCaptureBaseState.h @@ -0,0 +1,103 @@ +// +// SCCaptureBaseState.h +// Snapchat +// +// Created by Lin Jia on 10/19/17. +// +// + +#import "SCCaptureCommon.h" +#import "SCCaptureStateDelegate.h" +#import "SCCaptureStateMachineBookKeeper.h" +#import "SCCaptureStateUtil.h" +#import "SCCaptureWorker.h" +#import "SCManagedCaptureDevice.h" +#import "SCManagedCapturerState.h" +#import "SCStateTransitionPayload.h" + +#import + +@class SCCaptureResource; + +@class SCCapturerToken; + +@class SCAudioConfiguration; + +@class SCQueuePerformer; +/* + Every state machine state needs to inherit from SCCaptureBaseState to have the APIs. State machine state in general will + only implement APIs which are legal for itself. If illegal APIs are invoked, SCCaptureBaseState will handle it. + The intended behavior: + 1) crash using SCAssert in Debug build, + 2) ignore the API call, and log the call, for alpha/master/production. + 3) in the future, we will introduce a dangerous API call concept, and restart the camera in such a case, to avoid bad state. + + Every state machine state is going to be built to follow functional programming as much as possible. The shared + resources between them will be passed into the API via SCCaptureResource. + */ + +@interface SCCaptureBaseState : NSObject + +- (instancetype)init NS_UNAVAILABLE; + +- (instancetype)initWithPerformer:(SCQueuePerformer *)performer + bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper + delegate:(id)delegate; + +/* The following API will be invoked at the moment the state context promotes the state to be the current state. The state uses this + * chance to do something, such as start recording for the recording state. 
+ */ +- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload + resource:(SCCaptureResource *)resource + context:(NSString *)context; + +- (SCCaptureStateMachineStateId)stateId; + +- (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition + resource:(SCCaptureResource *)resource + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +- (void)startRunningWithCapturerToken:(SCCapturerToken *)token + resource:(SCCaptureResource *)resource + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token + resource:(SCCaptureResource *)resource + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context; + +- (void)prepareForRecordingWithResource:(SCCaptureResource *)resource + audioConfiguration:(SCAudioConfiguration *)configuration + context:(NSString *)context; + +- (void)startRecordingWithResource:(SCCaptureResource *)resource + audioConfiguration:(SCAudioConfiguration *)configuration + outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings + maxDuration:(NSTimeInterval)maxDuration + fileURL:(NSURL *)fileURL + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler + context:(NSString *)context; + +- (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context; + +- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context; + +- (void)captureStillImageWithResource:(SCCaptureResource *)resource + aspectRatio:(CGFloat)aspectRatio + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler + context:(NSString *)context; + +- (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration + 
resource:(SCCaptureResource *)resource + context:(NSString *)context; + +- (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler + resource:(SCCaptureResource *)resource + context:(NSString *)context; + +@property (nonatomic, strong, readonly) SCCaptureStateMachineBookKeeper *bookKeeper; +@end diff --git a/ManagedCapturer/SCCaptureBaseState.m b/ManagedCapturer/SCCaptureBaseState.m new file mode 100644 index 0000000..569ab54 --- /dev/null +++ b/ManagedCapturer/SCCaptureBaseState.m @@ -0,0 +1,169 @@ +// +// SCCaptureBaseState.m +// Snapchat +// +// Created by Lin Jia on 10/19/17. +// +// + +#import "SCCaptureBaseState.h" + +#import "SCCaptureStateMachineBookKeeper.h" +#import "SCCapturerToken.h" +#import "SCManagedCapturerV1_Private.h" + +#import +#import +#import + +@implementation SCCaptureBaseState { + SCCaptureStateMachineBookKeeper *_bookKeeper; + SCQueuePerformer *_performer; + __weak id _delegate; +} + +- (instancetype)initWithPerformer:(SCQueuePerformer *)performer + bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper + delegate:(id)delegate +{ + self = [super init]; + if (self) { + SCAssert(performer, @""); + SCAssert(bookKeeper, @""); + _bookKeeper = bookKeeper; + _performer = performer; + _delegate = delegate; + } + return self; +} + +- (SCCaptureStateMachineStateId)stateId +{ + return SCCaptureBaseStateId; +} + +- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload + resource:(SCCaptureResource *)resource + context:(NSString *)context +{ + [self _handleBaseStateBehavior:@"didBecomeCurrentState" context:context]; +} + +- (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition + resource:(SCCaptureResource *)resource + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + [self _handleBaseStateBehavior:@"initializeCaptureWithDevicePosition" context:context]; +} + +- (void)startRunningWithCapturerToken:(SCCapturerToken *)token + resource:(SCCaptureResource 
*)resource + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + [self _handleBaseStateBehavior:@"startRunningWithCapturerToken" context:context]; +} + +- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token + resource:(SCCaptureResource *)resource + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCAssertPerformer(_performer); + BOOL actuallyStopped = [[SCManagedCapturerV1 sharedInstance] stopRunningWithCaptureToken:token + completionHandler:completionHandler + context:context]; + // TODO: Fix CCAM-14450 + // This is a temporary solution for https://jira.sc-corp.net/browse/CCAM-14450 + // It is caused by switching from scanning state to stop running state when the view is disappearing in the scanning + // state, which can be reproduced by triggering scanning and then switching to the maps page. + // We remove SCAssert to ignore the crashes in the master branch and will find a solution for the illegal call for the + // state machine later. + + if (self.stateId != SCCaptureScanningStateId) { + SCAssert(!actuallyStopped, @"actuallyStopped in state: %@ with context: %@", SCCaptureStateName([self stateId]), + context); + } else { + SCLogCaptureStateMachineInfo(@"actuallyStopped:%d in state: %@ with context: %@", actuallyStopped, + SCCaptureStateName([self stateId]), context); + } + + if (actuallyStopped) { + [_delegate currentState:self + requestToTransferToNewState:SCCaptureInitializedStateId + payload:nil + context:context]; + } +} + +- (void)prepareForRecordingWithResource:(SCCaptureResource *)resource + audioConfiguration:(SCAudioConfiguration *)configuration + context:(NSString *)context +{ + [self _handleBaseStateBehavior:@"prepareForRecordingWithResource" context:context]; +} + +- (void)startRecordingWithResource:(SCCaptureResource *)resource + audioConfiguration:(SCAudioConfiguration *)configuration + outputSettings:(SCManagedVideoCapturerOutputSettings 
*)outputSettings + maxDuration:(NSTimeInterval)maxDuration + fileURL:(NSURL *)fileURL + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler + context:(NSString *)context +{ + [self _handleBaseStateBehavior:@"startRecordingWithResource" context:context]; +} + +- (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context +{ + [self _handleBaseStateBehavior:@"stopRecordingWithResource" context:context]; +} + +- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context +{ + [self _handleBaseStateBehavior:@"cancelRecordingWithResource" context:context]; +} + +- (void)captureStillImageWithResource:(SCCaptureResource *)resource + aspectRatio:(CGFloat)aspectRatio + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler + context:(NSString *)context +{ + [self _handleBaseStateBehavior:@"captureStillImageWithResource" context:context]; +} + +- (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration + resource:(SCCaptureResource *)resource + context:(NSString *)context +{ + [self _handleBaseStateBehavior:@"startScanWithScanConfiguration" context:context]; +} + +- (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler + resource:(SCCaptureResource *)resource + context:(NSString *)context +{ + // Temporary solution until IDT-12520 is resolved. 
+ [SCCaptureWorker stopScanWithCompletionHandler:completionHandler resource:resource]; + //[self _handleBaseStateBehavior:@"stopScanWithCompletionHandler"]; +} + +- (void)_handleBaseStateBehavior:(NSString *)illegalAPIName context:(NSString *)context +{ + [_bookKeeper state:[self stateId] + illegalAPIcalled:illegalAPIName + callStack:[NSThread callStackSymbols] + context:context]; + if (SCIsDebugBuild()) { + SCAssertFail(@"illegal API invoked on capture state machine"); + } +} + +- (SCCaptureStateMachineBookKeeper *)bookKeeper +{ + return _bookKeeper; +} +@end diff --git a/ManagedCapturer/SCCaptureStateDelegate.h b/ManagedCapturer/SCCaptureStateDelegate.h new file mode 100644 index 0000000..f07766f --- /dev/null +++ b/ManagedCapturer/SCCaptureStateDelegate.h @@ -0,0 +1,30 @@ +// +// SCCaptureStateDelegate.h +// Snapchat +// +// Created by Lin Jia on 10/27/17. +// +// + +#import "SCCaptureStateUtil.h" + +#import + +@class SCCaptureBaseState; +@class SCStateTransitionPayload; +/* + The state machine state delegate is used by state machine states to hint to the system that "I am done, now transfer + to other state". + + Currently, SCCaptureStateMachineContext is the central piece that glues all states together, and it is the delegate for + those states. + */ + +@protocol SCCaptureStateDelegate + +- (void)currentState:(SCCaptureBaseState *)state + requestToTransferToNewState:(SCCaptureStateMachineStateId)newState + payload:(SCStateTransitionPayload *)payload + context:(NSString *)context; + +@end diff --git a/ManagedCapturer/SCCaptureStateMachineBookKeeper.h b/ManagedCapturer/SCCaptureStateMachineBookKeeper.h new file mode 100644 index 0000000..24ea585 --- /dev/null +++ b/ManagedCapturer/SCCaptureStateMachineBookKeeper.h @@ -0,0 +1,29 @@ +// +// SCCaptureStateTransitionBookKeeper.h +// Snapchat +// +// Created by Lin Jia on 10/27/17. 
+// +// + +#import "SCCaptureStateUtil.h" + +#import + +/* + Book keeper is used to record every state transition, and every illegal API call. + */ + +@interface SCCaptureStateMachineBookKeeper : NSObject + +- (void)stateTransitionFrom:(SCCaptureStateMachineStateId)fromId + to:(SCCaptureStateMachineStateId)toId + context:(NSString *)context; + +- (void)state:(SCCaptureStateMachineStateId)captureState + illegalAPIcalled:(NSString *)illegalAPIName + callStack:(NSArray *)callStack + context:(NSString *)context; + +- (void)logAPICalled:(NSString *)apiName context:(NSString *)context; +@end diff --git a/ManagedCapturer/SCCaptureStateMachineBookKeeper.m b/ManagedCapturer/SCCaptureStateMachineBookKeeper.m new file mode 100644 index 0000000..7d9c466 --- /dev/null +++ b/ManagedCapturer/SCCaptureStateMachineBookKeeper.m @@ -0,0 +1,63 @@ +// +// SCCaptureStateTransitionBookKeeper.m +// Snapchat +// +// Created by Lin Jia on 10/27/17. +// +// + +#import "SCCaptureStateMachineBookKeeper.h" + +#import "SCCaptureStateUtil.h" +#import "SCLogger+Camera.h" + +#import +#import + +@interface SCCaptureStateMachineBookKeeper () { + NSDate *_lastStateStartTime; +} +@end + +@implementation SCCaptureStateMachineBookKeeper + +- (void)stateTransitionFrom:(SCCaptureStateMachineStateId)fromId + to:(SCCaptureStateMachineStateId)toId + context:(NSString *)context +{ + NSDate *date = [NSDate date]; + SCLogCaptureStateMachineInfo(@"State %@ life span: %f seconds, transition to: %@, in context:%@, at: %@ \n", + SCCaptureStateName(fromId), [date timeIntervalSinceDate:_lastStateStartTime], + SCCaptureStateName(toId), context, date); + _lastStateStartTime = date; +} + +- (void)state:(SCCaptureStateMachineStateId)captureState + illegalAPIcalled:(NSString *)illegalAPIName + callStack:(NSArray *)callStack + context:(NSString *)context + +{ + SCAssert(callStack, @"call stack empty"); + SCAssert(illegalAPIName, @""); + SCAssert(context, @"Context is empty"); + SCLogCaptureStateMachineError(@"State: %@, 
illegal API invoke: %@, at: %@, callstack: %@ \n", + SCCaptureStateName(captureState), illegalAPIName, [NSDate date], callStack); + NSArray *reportedArray = + [callStack count] > 15 ? [callStack subarrayWithRange:NSMakeRange(0, 15)] : callStack; + [[SCLogger sharedInstance] logEvent:kSCCameraStateMachineIllegalAPICall + parameters:@{ + @"state" : SCCaptureStateName(captureState), + @"API" : illegalAPIName, + @"call_stack" : reportedArray, + @"context" : context + }]; +} + +- (void)logAPICalled:(NSString *)apiName context:(NSString *)context +{ + SCAssert(apiName, @"API name is empty"); + SCAssert(context, @"Context is empty"); + SCLogCaptureStateMachineInfo(@"api: %@ context: %@", apiName, context); +} +@end diff --git a/ManagedCapturer/SCCaptureStateMachineContext.h b/ManagedCapturer/SCCaptureStateMachineContext.h new file mode 100644 index 0000000..1e98943 --- /dev/null +++ b/ManagedCapturer/SCCaptureStateMachineContext.h @@ -0,0 +1,76 @@ +// +// SCCaptureStateMachineContext.h +// Snapchat +// +// Created by Lin Jia on 10/18/17. +// +// + +#import "SCCaptureCommon.h" +#import "SCManagedCaptureDevice.h" + +#import + +#import + +/* + SCCaptureStateMachineContext is the central piece that glues all states together. + + It will pass API calls to the current state. + + The classic state machine design pattern: + https://en.wikipedia.org/wiki/State_pattern + + It is also the delegate for the states it manages, so that those states can tell stateMachineContext to transit to next + state. 
+ */ + +@class SCCaptureResource; + +@class SCCapturerToken; + +@interface SCCaptureStateMachineContext : NSObject + +- (instancetype)initWithResource:(SCCaptureResource *)resource; + +- (void)initializeCaptureWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +- (SCCapturerToken *)startRunningWithContext:(NSString *)context completionHandler:(dispatch_block_t)completionHandler; + +- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context; + +- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token + after:(NSTimeInterval)delay + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context; + +- (void)prepareForRecordingAsynchronouslyWithAudioConfiguration:(SCAudioConfiguration *)configuration + context:(NSString *)context; + +- (void)startRecordingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings + audioConfiguration:(SCAudioConfiguration *)configuration + maxDuration:(NSTimeInterval)maxDuration + fileURL:(NSURL *)fileURL + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler + context:(NSString *)context; + +- (void)stopRecordingWithContext:(NSString *)context; + +- (void)cancelRecordingWithContext:(NSString *)context; + +- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio + captureSessionID:(NSString *)captureSessionID + completionHandler: + (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler + context:(NSString *)context; + +#pragma mark - Scanning +- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context; +- 
(void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; + +@end diff --git a/ManagedCapturer/SCCaptureStateMachineContext.m b/ManagedCapturer/SCCaptureStateMachineContext.m new file mode 100644 index 0000000..5fd1b7a --- /dev/null +++ b/ManagedCapturer/SCCaptureStateMachineContext.m @@ -0,0 +1,301 @@ +// +// SCCaptureStateMachineContext.m +// Snapchat +// +// Created by Lin Jia on 10/18/17. +// +// + +#import "SCCaptureStateMachineContext.h" + +#import "SCCaptureBaseState.h" +#import "SCCaptureImageState.h" +#import "SCCaptureImageWhileRecordingState.h" +#import "SCCaptureInitializedState.h" +#import "SCCaptureRecordingState.h" +#import "SCCaptureResource.h" +#import "SCCaptureRunningState.h" +#import "SCCaptureScanningState.h" +#import "SCCaptureStateMachineBookKeeper.h" +#import "SCCaptureStateUtil.h" +#import "SCCaptureUninitializedState.h" +#import "SCCaptureWorker.h" +#import "SCCapturerToken.h" +#import "SCStateTransitionPayload.h" + +#import +#import +#import +#import +#import +#import + +@interface SCCaptureStateMachineContext () { + SCQueuePerformer *_queuePerformer; + + // Cache all the states. 
+ NSMutableDictionary *_states; + SCCaptureBaseState *_currentState; + SCCaptureStateMachineBookKeeper *_bookKeeper; + SCCaptureResource *_captureResource; +} +@end + +@implementation SCCaptureStateMachineContext + +- (instancetype)initWithResource:(SCCaptureResource *)resource +{ + self = [super init]; + if (self) { + SCAssert(resource, @""); + SCAssert(resource.queuePerformer, @""); + _captureResource = resource; + _queuePerformer = resource.queuePerformer; + _states = [[NSMutableDictionary alloc] init]; + _bookKeeper = [[SCCaptureStateMachineBookKeeper alloc] init]; + [self _setCurrentState:SCCaptureUninitializedStateId payload:nil context:SCCapturerContext]; + } + return self; +} + +- (void)_setCurrentState:(SCCaptureStateMachineStateId)stateId + payload:(SCStateTransitionPayload *)payload + context:(NSString *)context +{ + switch (stateId) { + case SCCaptureUninitializedStateId: + if (![_states objectForKey:@(stateId)]) { + SCCaptureUninitializedState *uninitializedState = + [[SCCaptureUninitializedState alloc] initWithPerformer:_queuePerformer + bookKeeper:_bookKeeper + delegate:self]; + [_states setObject:uninitializedState forKey:@(stateId)]; + } + _currentState = [_states objectForKey:@(stateId)]; + break; + case SCCaptureInitializedStateId: + if (![_states objectForKey:@(stateId)]) { + SCCaptureInitializedState *initializedState = + [[SCCaptureInitializedState alloc] initWithPerformer:_queuePerformer + bookKeeper:_bookKeeper + delegate:self]; + [_states setObject:initializedState forKey:@(stateId)]; + } + _currentState = [_states objectForKey:@(stateId)]; + break; + case SCCaptureRunningStateId: + if (![_states objectForKey:@(stateId)]) { + SCCaptureRunningState *runningState = + [[SCCaptureRunningState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self]; + [_states setObject:runningState forKey:@(stateId)]; + } + _currentState = [_states objectForKey:@(stateId)]; + break; + case SCCaptureImageStateId: + if (![_states 
objectForKey:@(stateId)]) { + SCCaptureImageState *captureImageState = + [[SCCaptureImageState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self]; + [_states setObject:captureImageState forKey:@(stateId)]; + } + _currentState = [_states objectForKey:@(stateId)]; + break; + case SCCaptureImageWhileRecordingStateId: + if (![_states objectForKey:@(stateId)]) { + SCCaptureImageWhileRecordingState *captureImageWhileRecordingState = + [[SCCaptureImageWhileRecordingState alloc] initWithPerformer:_queuePerformer + bookKeeper:_bookKeeper + delegate:self]; + [_states setObject:captureImageWhileRecordingState forKey:@(stateId)]; + } + _currentState = [_states objectForKey:@(stateId)]; + break; + case SCCaptureScanningStateId: + if (![_states objectForKey:@(stateId)]) { + SCCaptureScanningState *scanningState = + [[SCCaptureScanningState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self]; + [_states setObject:scanningState forKey:@(stateId)]; + } + _currentState = [_states objectForKey:@(stateId)]; + break; + case SCCaptureRecordingStateId: + if (![_states objectForKey:@(stateId)]) { + SCCaptureRecordingState *recordingState = [[SCCaptureRecordingState alloc] initWithPerformer:_queuePerformer + bookKeeper:_bookKeeper + delegate:self]; + [_states setObject:recordingState forKey:@(stateId)]; + } + _currentState = [_states objectForKey:@(stateId)]; + break; + default: + SCAssert(NO, @"illigal state Id"); + break; + } + [_currentState didBecomeCurrentState:payload resource:_captureResource context:context]; +} + +- (void)initializeCaptureWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + [SCCaptureWorker setupCapturePreviewLayerController]; + + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_queuePerformer perform:^{ + SCTraceResume(resumeToken); + [_currentState 
initializeCaptureWithDevicePosition:devicePosition + resource:_captureResource + completionHandler:completionHandler + context:context]; + }]; +} + +- (SCCapturerToken *)startRunningWithContext:(NSString *)context completionHandler:(dispatch_block_t)completionHandler +{ + [[SCLogger sharedInstance] updateLogTimedEventStart:kSCCameraMetricsOpen uniqueId:@""]; + + SCCapturerToken *token = [[SCCapturerToken alloc] initWithIdentifier:context]; + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_queuePerformer perform:^{ + SCTraceResume(resumeToken); + [_currentState startRunningWithCapturerToken:token + resource:_captureResource + completionHandler:completionHandler + context:context]; + }]; + + return token; +} + +- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_queuePerformer perform:^{ + SCTraceResume(resumeToken); + [_currentState stopRunningWithCapturerToken:token + resource:_captureResource + completionHandler:completionHandler + context:context]; + }]; +} + +- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token + after:(NSTimeInterval)delay + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_queuePerformer perform:^{ + SCTraceResume(resumeToken); + [_currentState stopRunningWithCapturerToken:token + resource:_captureResource + completionHandler:completionHandler + context:context]; + } + after:delay]; +} + +- (void)prepareForRecordingAsynchronouslyWithAudioConfiguration:(SCAudioConfiguration *)configuration + context:(NSString *)context +{ + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_queuePerformer perform:^{ + SCTraceResume(resumeToken); + [_currentState prepareForRecordingWithResource:_captureResource + 
audioConfiguration:configuration + context:context]; + }]; +} + +- (void)startRecordingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings + audioConfiguration:(SCAudioConfiguration *)configuration + maxDuration:(NSTimeInterval)maxDuration + fileURL:(NSURL *)fileURL + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_queuePerformer perform:^{ + SCTraceResume(resumeToken); + [_currentState startRecordingWithResource:_captureResource + audioConfiguration:configuration + outputSettings:outputSettings + maxDuration:maxDuration + fileURL:fileURL + captureSessionID:captureSessionID + completionHandler:completionHandler + context:context]; + }]; +} + +- (void)stopRecordingWithContext:(NSString *)context +{ + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_queuePerformer perform:^{ + SCTraceResume(resumeToken); + [_currentState stopRecordingWithResource:_captureResource context:context]; + }]; +} + +- (void)cancelRecordingWithContext:(NSString *)context +{ + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_queuePerformer perform:^{ + SCTraceResume(resumeToken); + [_currentState cancelRecordingWithResource:_captureResource context:context]; + }]; +} + +- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio + captureSessionID:(NSString *)captureSessionID + completionHandler: + (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler + context:(NSString *)context +{ + [_queuePerformer perform:^() { + [_currentState captureStillImageWithResource:_captureResource + aspectRatio:aspectRatio + captureSessionID:captureSessionID + completionHandler:completionHandler + context:context]; + }]; +} + +- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context +{ + 
[_queuePerformer perform:^() { + [_currentState startScanWithScanConfiguration:configuration resource:_captureResource context:context]; + }]; +} + +- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context +{ + [_queuePerformer perform:^() { + [_currentState stopScanWithCompletionHandler:completionHandler resource:_captureResource context:context]; + }]; +} + +- (void)currentState:(SCCaptureBaseState *)state + requestToTransferToNewState:(SCCaptureStateMachineStateId)newState + payload:(SCStateTransitionPayload *)payload + context:(NSString *)context +{ + SCAssertPerformer(_queuePerformer); + SCAssert(_currentState == state, @"state: %@ newState: %@ context:%@", SCCaptureStateName([state stateId]), + SCCaptureStateName(newState), context); + if (payload) { + SCAssert(payload.fromState == [state stateId], @"From state id check"); + SCAssert(payload.toState == newState, @"To state id check"); + } + + if (_currentState != state) { + return; + } + + [_bookKeeper stateTransitionFrom:[state stateId] to:newState context:context]; + [self _setCurrentState:newState payload:payload context:context]; +} + +@end diff --git a/ManagedCapturer/SCCaptureStateUtil.h b/ManagedCapturer/SCCaptureStateUtil.h new file mode 100644 index 0000000..1b8ca4a --- /dev/null +++ b/ManagedCapturer/SCCaptureStateUtil.h @@ -0,0 +1,37 @@ +// +// SCCaptureStateUtil.h +// Snapchat +// +// Created by Lin Jia on 10/27/17. +// +// + +#import "SCLogger+Camera.h" + +#import +#import + +#import + +#define SCLogCaptureStateMachineInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCCaptureStateMachine] " fmt, ##__VA_ARGS__) +#define SCLogCaptureStateMachineError(fmt, ...) 
SCLogCoreCameraError(@"[SCCaptureStateMachine] " fmt, ##__VA_ARGS__) + +typedef NSNumber SCCaptureStateKey; + +typedef NS_ENUM(NSUInteger, SCCaptureStateMachineStateId) { + SCCaptureBaseStateId = 0, + SCCaptureUninitializedStateId, + SCCaptureInitializedStateId, + SCCaptureImageStateId, + SCCaptureImageWhileRecordingStateId, + SCCaptureRunningStateId, + SCCaptureRecordingStateId, + SCCaptureScanningStateId, + SCCaptureStateMachineStateIdCount +}; + +SC_EXTERN_C_BEGIN + +NSString *SCCaptureStateName(SCCaptureStateMachineStateId stateId); + +SC_EXTERN_C_END diff --git a/ManagedCapturer/SCCaptureStateUtil.m b/ManagedCapturer/SCCaptureStateUtil.m new file mode 100644 index 0000000..deb20a7 --- /dev/null +++ b/ManagedCapturer/SCCaptureStateUtil.m @@ -0,0 +1,38 @@ +// +// SCCaptureStateUtil.m +// Snapchat +// +// Created by Lin Jia on 10/27/17. +// +// + +#import "SCCaptureStateUtil.h" + +#import +#import + +NSString *SCCaptureStateName(SCCaptureStateMachineStateId stateId) +{ + switch (stateId) { + case SCCaptureBaseStateId: + return @"SCCaptureBaseStateId"; + case SCCaptureUninitializedStateId: + return @"SCCaptureUninitializedStateId"; + case SCCaptureInitializedStateId: + return @"SCCaptureInitializedStateId"; + case SCCaptureImageStateId: + return @"SCCaptureImageStateId"; + case SCCaptureImageWhileRecordingStateId: + return @"SCCaptureImageWhileRecordingStateId"; + case SCCaptureRunningStateId: + return @"SCCaptureRunningStateId"; + case SCCaptureRecordingStateId: + return @"SCCaptureRecordingStateId"; + case SCCaptureScanningStateId: + return @"SCCaptureScanningStateId"; + default: + SCCAssert(NO, @"illegate state id"); + break; + } + return @"SCIllegalStateId"; +} diff --git a/ManagedCapturer/SCManagedCapturerLogging.h b/ManagedCapturer/SCManagedCapturerLogging.h new file mode 100644 index 0000000..069b438 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerLogging.h @@ -0,0 +1,12 @@ +// +// SCManagedCapturerLogging.h +// Snapchat +// +// Created by Lin Jia on 
11/13/17.
//
//

#import

// Camera-scoped logging macros: prepend the [SCManagedCapturer] tag to core-camera log output.
#define SCLogCapturerInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__)
#define SCLogCapturerWarning(fmt, ...) SCLogCoreCameraWarning(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__)
#define SCLogCapturerError(fmt, ...) SCLogCoreCameraError(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__)
diff --git a/ManagedCapturer/SCManagedPhotoCapturer.m b/ManagedCapturer/SCManagedPhotoCapturer.m
new file mode 100644
index 0000000..07b441e
--- /dev/null
+++ b/ManagedCapturer/SCManagedPhotoCapturer.m
@@ -0,0 +1,667 @@
//
//  SCManagedPhotoCapturer.m
//  Snapchat
//
//  Created by Chao Pang on 10/5/16.
//  Copyright © 2016 Snapchat, Inc. All rights reserved.
//

#import "SCManagedPhotoCapturer.h"

#import "AVCaptureConnection+InputDevice.h"
#import "SCCameraTweaks.h"
#import "SCLogger+Camera.h"
#import "SCManagedCapturer.h"
#import "SCManagedFrameHealthChecker.h"
#import "SCManagedStillImageCapturer_Protected.h"
#import "SCStillImageCaptureVideoInputMethod.h"
#import "SCStillImageDepthBlurFilter.h"

#import
#import
#import
#import
#import
#import
#import
#import
#import
#import

@import ImageIO;

static NSString *const kSCManagedPhotoCapturerErrorDomain = @"kSCManagedPhotoCapturerErrorDomain";

// Error codes surfaced through kSCManagedPhotoCapturerErrorDomain.
static NSInteger const kSCManagedPhotoCapturerErrorEncounteredException = 10000;
static NSInteger const kSCManagedPhotoCapturerInconsistentStatus = 10001;

// Linear capture lifecycle used to sanity-check the ordering of
// AVCapturePhotoCaptureDelegate callbacks: PrepareToCapture -> WillCapture -> DidFinishProcess.
typedef NS_ENUM(NSUInteger, SCManagedPhotoCapturerStatus) {
    SCManagedPhotoCapturerStatusPrepareToCapture,
    SCManagedPhotoCapturerStatusWillCapture,
    SCManagedPhotoCapturerStatusDidFinishProcess,
};

@interface SCManagedPhotoCapturer ()
@end

// AVCapturePhotoOutput-based still image capturer (iOS 10+ path). All mutable state below is
// expected to be touched only on _performer's queue (enforced by SCAssert in the methods).
@implementation SCManagedPhotoCapturer {
    AVCapturePhotoOutput *_photoOutput;

    BOOL _shouldCapture;            // capture was requested but deferred until exposure settles
    BOOL _shouldEnableHRSI;         // high-resolution still image, applied per-AVCapturePhotoSettings
    BOOL _portraitModeCaptureEnabled;
    NSUInteger _retries;            // NOTE(review): assigned in captureStillImage… but never read in this file — confirm whether it is dead state inherited from the legacy capturer

    CGPoint _portraitModePointOfInterest;
    SCStillImageDepthBlurFilter *_depthBlurFilter;

    sc_managed_still_image_capturer_capture_still_image_completion_handler_t _callbackBlock;

    SCStillImageCaptureVideoInputMethod *_videoFileMethod;

    SCManagedPhotoCapturerStatus _status;
}

/// Designated entry point; wires the photo output into the session and defaults the
/// portrait-mode point of interest to the frame center.
- (instancetype)initWithSession:(AVCaptureSession *)session
                      performer:(id)performer
             lensProcessingCore:(id)lensProcessingCore
                       delegate:(id)delegate
{
    SCTraceStart();
    self = [super initWithSession:session performer:performer lensProcessingCore:lensProcessingCore delegate:delegate];
    if (self) {
        [self setupWithSession:session];
        _portraitModePointOfInterest = CGPointMake(0.5, 0.5);
    }
    return self;
}

/// Creates the AVCapturePhotoOutput and attaches it to the session.
- (void)setupWithSession:(AVCaptureSession *)session
{
    SCTraceStart();
    _photoOutput = [[AVCapturePhotoOutput alloc] init];
    _photoOutput.highResolutionCaptureEnabled = YES;
    [self setAsOutput:session];
}

/// Adds the photo output to the session if the session accepts it; silently a no-op otherwise.
- (void)setAsOutput:(AVCaptureSession *)session
{
    SCTraceStart();
    if ([session canAddOutput:_photoOutput]) {
        [session addOutput:_photoOutput];
    }
}

- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"");
    // Here we cannot directly set _photoOutput.highResolutionCaptureEnabled, since it will cause
    // black frame blink when enabling lenses. Instead, we enable HRSI in AVCapturePhotoSettings.
    // https://ph.sc-corp.net/T96228
    _shouldEnableHRSI = highResolutionStillImageOutputEnabled;
}

- (void)enableStillImageStabilization
{
    // The lens stabilization is enabled when configure AVCapturePhotoSettings
    // instead of AVCapturePhotoOutput
    SCTraceStart();
}

/// Toggles depth-data delivery and lazily creates the depth-blur filter the first time
/// portrait mode is enabled.
- (void)setPortraitModeCaptureEnabled:(BOOL)enabled
{
    _portraitModeCaptureEnabled = enabled;
    if (@available(ios 11.0, *)) {
        _photoOutput.depthDataDeliveryEnabled = enabled;
    }
    if (enabled && _depthBlurFilter == nil) {
        _depthBlurFilter = [[SCStillImageDepthBlurFilter alloc] init];
    }
}

- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest
{
    _portraitModePointOfInterest = pointOfInterest;
}

- (void)removeAsOutput:(AVCaptureSession *)session
{
    SCTraceStart();
    [session removeOutput:_photoOutput];
}

/// Kicks off a still image capture. If manual exposure detection reports the camera is still
/// adjusting, the capture is deferred until exposure settles or the 0.4 s deadline fires.
/// Must run on _performer's queue; completionHandler is eventually called exactly once on it.
- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio
                            atZoomFactor:(float)zoomFactor
                             fieldOfView:(float)fieldOfView
                                   state:(SCManagedCapturerState *)state
                        captureSessionID:(NSString *)captureSessionID
                  shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
                       completionHandler:
                           (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler
{
    SCTraceStart();
    SCAssert(completionHandler, @"completionHandler shouldn't be nil");
    SCAssert([_performer isCurrentPerformer], @"");
    _retries = 6; // AVFoundation Unknown Error usually resolves itself within 0.5 seconds
    _aspectRatio = aspectRatio;
    _zoomFactor = zoomFactor;
    _fieldOfView = fieldOfView;
    _state = state;
    _captureSessionID = captureSessionID;
    _shouldCaptureFromVideo = shouldCaptureFromVideo;
    SCAssert(!_completionHandler, @"We shouldn't have a _completionHandler at this point otherwise we are destroying "
                                  @"current completion handler.");

    // The purpose of these lines is to attach a strong reference to self to the completion handler.
    // This is because AVCapturePhotoOutput does not hold a strong reference to its delegate, which acts as a completion
    // handler.
    // If self is deallocated during the call to _photoOutput capturePhotoWithSettings:delegate:, which may happen if
    // any AVFoundationError occurs,
    // then its callback method, captureOutput:didFinish..., will not be called, and the completion handler will be
    // forgotten.
    // This comes with a risk of a memory leak. If for whatever reason the completion handler field is never used and
    // then unset,
    // then we have a permanent retain cycle.
    // NOTE(review): this self-retain is intentional per the comment above, but every failure path must
    // clear _completionHandler or self leaks permanently — verify all error paths do so.
    _callbackBlock = completionHandler;
    __typeof(self) strongSelf = self;
    _completionHandler = ^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error) {
        strongSelf->_callbackBlock(fullScreenImage, metadata, error);
        strongSelf->_callbackBlock = nil;
    };
    [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayStart];

    if (!_adjustingExposureManualDetect) {
        SCLogCoreCameraInfo(@"Capturing still image now");
        [self _capturePhotoWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo];
        _shouldCapture = NO;
    } else {
        SCLogCoreCameraInfo(@"Wait adjusting exposure (or after 0.4 seconds) and then capture still image");
        _shouldCapture = YES;
        [self _deadlineCapturePhoto];
    }
}

#pragma mark - SCManagedDeviceCapacityAnalyzerListener

/// Manual-detect exposure signal: records the flag and fires any deferred capture once
/// exposure stops adjusting.
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeAdjustingExposure:(BOOL)adjustingExposure
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        // Since this is handled on a different thread, therefore, dispatch back to the queue we operated on.
        self->_adjustingExposureManualDetect = adjustingExposure;
        [self _didChangeAdjustingExposure:adjustingExposure
                             withStrategy:kSCCameraExposureAdjustmentStrategyManualDetect];
    }];
}

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        self->_lightingConditionType = lightingCondition;
    }];
}

#pragma mark - SCManagedCapturerListener

/// KVO-based exposure signal; same deferred-capture handling as the manual-detect path.
- (void)managedCapturer:(id)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        // Since this is handled on a different thread, therefore, dispatch back to the queue we operated on.
        [self _didChangeAdjustingExposure:state.adjustingExposure withStrategy:kSCCameraExposureAdjustmentStrategyKVO];
    }];
}

#pragma mark - AVCapturePhotoCaptureDelegate

/// iOS 10 callback (sample-buffer variant). The buffer is CFRetained before hopping to the
/// performer queue and CFReleased inside the block.
/// NOTE(review): if self is deallocated before the block runs, SC_GUARD_ELSE_RETURN(self)
/// exits without CFRelease — the retained sample buffer would leak; confirm and consider
/// releasing in the guard path.
- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput
    didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer
                previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer
                        resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
                         bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings
                                   error:(NSError *)error
{
    SCTraceStart();
    if (photoSampleBuffer) {
        CFRetain(photoSampleBuffer);
    }
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        SC_GUARD_ELSE_RUN_AND_RETURN(photoSampleBuffer, [self _photoCaptureDidFailWithError:error]);
        if (self->_status == SCManagedPhotoCapturerStatusWillCapture) {
            NSData *imageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer
                                                                            previewPhotoSampleBuffer:nil];

            [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
                                                  uniqueId:@"IMAGE"
                                                splitPoint:@"DID_FINISH_PROCESSING"];
            [self _capturePhotoFinishedWithImageData:imageData
                                        sampleBuffer:photoSampleBuffer
                                          cameraInfo:cameraInfoForBuffer(photoSampleBuffer)
                                               error:error];

        } else {
            SCLogCoreCameraInfo(@"DidFinishProcessingPhoto with unexpected status: %@",
                                [self _photoCapturerStatusToString:self->_status]);
            [self _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain
                                                                    code:kSCManagedPhotoCapturerInconsistentStatus
                                                                userInfo:nil]];
        }
        CFRelease(photoSampleBuffer);
    }];
}

/// iOS 11+ callback (AVCapturePhoto variant). Applies the depth-blur filter when portrait
/// mode is on, then funnels into the shared finish path.
- (void)captureOutput:(AVCapturePhotoOutput *)output
    didFinishProcessingPhoto:(nonnull AVCapturePhoto *)photo
                       error:(nullable NSError *)error NS_AVAILABLE_IOS(11_0)
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        NSData *imageData = [photo fileDataRepresentation];
        SC_GUARD_ELSE_RUN_AND_RETURN(imageData, [self _photoCaptureDidFailWithError:error]);
        if (self->_status == SCManagedPhotoCapturerStatusWillCapture) {
            if (@available(ios 11.0, *)) {
                if (_portraitModeCaptureEnabled) {
                    RenderData renderData = {
                        .depthDataMap = photo.depthData.depthDataMap,
                        .depthBlurPointOfInterest = &_portraitModePointOfInterest,
                    };
                    // Replace the JPEG bytes with the depth-blurred rendering.
                    imageData = [_depthBlurFilter renderWithPhotoData:imageData renderData:renderData];
                }
            }

            [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
                                                  uniqueId:@"IMAGE"
                                                splitPoint:@"DID_FINISH_PROCESSING"];

            [self _capturePhotoFinishedWithImageData:imageData metadata:photo.metadata error:error];

        } else {
            SCLogCoreCameraInfo(@"DidFinishProcessingPhoto with unexpected status: %@",
                                [self _photoCapturerStatusToString:self->_status]);
            [self _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain
                                                                    code:kSCManagedPhotoCapturerInconsistentStatus
                                                                userInfo:nil]];
        }
    }];
}

/// Advances the status to WillCapture and notifies the delegate.
/// NOTE(review): the status transition only happens when the delegate implements
/// managedStillImageCapturerWillCapturePhoto: — if it doesn't, _status stays at
/// PrepareToCapture and the finish callbacks will take the "unexpected status" failure
/// branch; confirm this coupling is intended.
- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput
    willBeginCaptureForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerWillCapturePhoto:)]) {
            if (self->_status == SCManagedPhotoCapturerStatusPrepareToCapture) {
                self->_status = SCManagedPhotoCapturerStatusWillCapture;

                [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
                                                      uniqueId:@"IMAGE"
                                                    splitPoint:@"WILL_BEGIN_CAPTURE"];
                [self->_delegate managedStillImageCapturerWillCapturePhoto:self];
            } else {
                SCLogCoreCameraInfo(@"WillBeginCapture with unexpected status: %@",
                                    [self _photoCapturerStatusToString:self->_status]);
            }
        }
    }];
}

/// Shutter moment: forwards to the delegate when the state machine is in an expected state.
- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput
    didCapturePhotoForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerDidCapturePhoto:)]) {
            if (self->_status == SCManagedPhotoCapturerStatusWillCapture ||
                self->_status == SCManagedPhotoCapturerStatusDidFinishProcess) {
                [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
                                                      uniqueId:@"IMAGE"
                                                    splitPoint:@"DID_CAPTURE_PHOTO"];
                [self->_delegate managedStillImageCapturerDidCapturePhoto:self];
            } else {
                SCLogCoreCameraInfo(@"DidCapturePhoto with unexpected status: %@",
                                    [self _photoCapturerStatusToString:self->_status]);
            }
        }
    }];
}

#pragma mark - Private methods

/// Fires the deferred capture once exposure stops adjusting (shared by the manual-detect,
/// KVO, and deadline strategies).
- (void)_didChangeAdjustingExposure:(BOOL)adjustingExposure withStrategy:(NSString *)strategy
{
    if (!adjustingExposure && self->_shouldCapture) {
        SCLogCoreCameraInfo(@"Capturing after adjusting exposure using strategy: %@", strategy);
        [self _capturePhotoWithExposureAdjustmentStrategy:strategy];
        self->_shouldCapture = NO;
    }
}

/// Success path for the sample-buffer (iOS 10) callback.
/// NOTE(review): the cameraInfo parameter is ignored — cameraInfoForBuffer(sampleBuffer) is
/// recomputed in the forwarded call; confirm whether the parameter should be used instead.
- (void)_capturePhotoFinishedWithImageData:(NSData *)imageData
                              sampleBuffer:(CMSampleBufferRef)sampleBuffer
                                cameraInfo:(NSDictionary *)cameraInfo
                                     error:(NSError *)error
{
    [self _photoCaptureDidSucceedWithImageData:imageData
                                  sampleBuffer:sampleBuffer
                                    cameraInfo:cameraInfoForBuffer(sampleBuffer)
                                         error:error];
    self->_status = SCManagedPhotoCapturerStatusDidFinishProcess;
}

/// Success path for the AVCapturePhoto (iOS 11+) callback.
- (void)_capturePhotoFinishedWithImageData:(NSData *)imageData metadata:(NSDictionary *)metadata error:(NSError *)error
{
    [self _photoCaptureDidSucceedWithImageData:imageData metadata:metadata error:error];
    self->_status = SCManagedPhotoCapturerStatusDidFinishProcess;
}

/// Schedules the exposure-deadline fallback so a deferred capture always fires within
/// SCCameraTweaksExposureDeadline() even if exposure never settles.
- (void)_deadlineCapturePhoto
{
    SCTraceStart();
    // Use the SCManagedCapturer's private queue.
    @weakify(self);
    [_performer perform:^{
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        if (self->_shouldCapture) {
            [self _capturePhotoWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyDeadline];
            self->_shouldCapture = NO;
        }
    }
                  after:SCCameraTweaksExposureDeadline()];
}

/// Performs the actual capture: either via the video-file input method or via
/// AVCapturePhotoOutput with settings built for the current state.
/// NOTE(review): in the !captureConnection branch the completion handler is invoked with an
/// error but the method does NOT return — it falls through and attempts the capture anyway
/// with a nil connection, and _completionHandler has already been cleared so a later callback
/// will hit the nil _callbackBlock. Also completionHandler is called without a nil check.
/// Both look like real defects; confirm against the legacy capturer's behavior.
- (void)_capturePhotoWithExposureAdjustmentStrategy:(NSString *)strategy
{
    SCTraceStart();
    [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayEndWithStrategy:strategy];
    if (_shouldCaptureFromVideo) {
        [self captureStillImageFromVideoBuffer];
        return;
    }
    SCAssert([_performer isCurrentPerformer], @"");
    SCAssert(_photoOutput, @"_photoOutput shouldn't be nil");
    _status = SCManagedPhotoCapturerStatusPrepareToCapture;
    AVCapturePhotoOutput *photoOutput = _photoOutput;
    AVCaptureConnection *captureConnection = [self _captureConnectionFromPhotoOutput:photoOutput];
    SCManagedCapturerState *state = [_state copy];
#if !TARGET_IPHONE_SIMULATOR
    if (!captureConnection) {
        sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
        _completionHandler = nil;
        completionHandler(nil, nil, [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain
                                                        code:kSCManagedStillImageCapturerNoStillImageConnection
                                                    userInfo:nil]);
    }
#endif
    AVCapturePhotoSettings *photoSettings =
        [self _photoSettingsWithPhotoOutput:photoOutput captureConnection:captureConnection captureState:state];
    // Select appropriate image capture method

    if ([_delegate managedStillImageCapturerShouldProcessFileInput:self]) {
        if (!_videoFileMethod) {
            _videoFileMethod = [[SCStillImageCaptureVideoInputMethod alloc] init];
        }
        [[SCLogger sharedInstance] logStillImageCaptureApi:@"SCStillImageCaptureVideoFileInput"];
        [[SCCoreCameraLogger sharedInstance]
            logCameraCreationDelaySplitPointStillImageCaptureApi:@"SCStillImageCaptureVideoFileInput"];
        [_delegate managedStillImageCapturerWillCapturePhoto:self];
        [_videoFileMethod captureStillImageWithCapturerState:state
            successBlock:^(NSData *imageData, NSDictionary *cameraInfo, NSError *error) {
                [_performer performImmediatelyIfCurrentPerformer:^{
                    [self _photoCaptureDidSucceedWithImageData:imageData
                                                  sampleBuffer:nil
                                                    cameraInfo:cameraInfo
                                                         error:error];
                }];
            }
            failureBlock:^(NSError *error) {
                [_performer performImmediatelyIfCurrentPerformer:^{
                    [self _photoCaptureDidFailWithError:error];
                }];
            }];
    } else {
        [[SCLogger sharedInstance] logStillImageCaptureApi:@"AVCapturePhoto"];
        [[SCCoreCameraLogger sharedInstance] logCameraCreationDelaySplitPointStillImageCaptureApi:@"AVCapturePhoto"];
        // capturePhotoWithSettings: is known to throw on invalid configurations; convert the
        // exception into a domain error instead of crashing.
        @try {
            [photoOutput capturePhotoWithSettings:photoSettings delegate:self];
        } @catch (NSException *e) {
            [SCCrashLogger logHandledException:e];
            [self
                _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain
                                                                  code:kSCManagedPhotoCapturerErrorEncounteredException
                                                              userInfo:@{
                                                                  @"exception" : e
                                                              }]];
        }
    }
}

/// Builds the full-screen UIImage from the captured JPEG data (sample-buffer variant) and
/// invokes + clears the stored completion handler exactly once.
- (void)_photoCaptureDidSucceedWithImageData:(NSData *)imageData
                                sampleBuffer:(CMSampleBufferRef)sampleBuffer
                                  cameraInfo:(NSDictionary *)cameraInfo
                                       error:(NSError *)error
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"");
    [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];
    [[SCCoreCameraLogger sharedInstance]
        logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];

    UIImage *fullScreenImage = [self imageFromData:imageData
                                 currentZoomFactor:_zoomFactor
                                 targetAspectRatio:_aspectRatio
                                       fieldOfView:_fieldOfView
                                             state:_state
                                      sampleBuffer:sampleBuffer];
    [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
                                          uniqueId:@"IMAGE"
                                        splitPoint:@"WILL_START_COMPLETION_HANDLER"];
    sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
    _completionHandler = nil;
    if (completionHandler) {
        completionHandler(fullScreenImage, cameraInfo, error);
    }
}

/// Same as above for the metadata-based (iOS 11+) variant.
- (void)_photoCaptureDidSucceedWithImageData:(NSData *)imageData
                                    metadata:(NSDictionary *)metadata
                                       error:(NSError *)error
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"");
    [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];
    [[SCCoreCameraLogger sharedInstance]
        logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];

    UIImage *fullScreenImage = [self imageFromData:imageData
                                 currentZoomFactor:_zoomFactor
                                 targetAspectRatio:_aspectRatio
                                       fieldOfView:_fieldOfView
                                             state:_state
                                          metadata:metadata];
    [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
                                          uniqueId:@"IMAGE"
                                        splitPoint:@"WILL_START_COMPLETION_HANDLER"];
    sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
    _completionHandler = nil;
    if (completionHandler) {
        completionHandler(fullScreenImage, metadata, error);
    }
}

/// Failure path: invokes + clears the stored completion handler with (nil, nil, error).
- (void)_photoCaptureDidFailWithError:(NSError *)error
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"");
    sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
    _completionHandler = nil;
    if (completionHandler) {
        completionHandler(nil, nil, error);
    }
}

/// Returns the first connection on the photo output carrying a video input port, or nil.
- (AVCaptureConnection *)_captureConnectionFromPhotoOutput:(AVCapturePhotoOutput *)photoOutput
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"");
    NSArray *connections = [photoOutput.connections copy];
    for (AVCaptureConnection *connection in connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                return connection;
            }
        }
    }
    return nil;
}

/// Chooses between bracketed (manual exposure) and default photo settings for this capture.
- (AVCapturePhotoSettings *)_photoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput
                                        captureConnection:(AVCaptureConnection *)captureConnection
                                             captureState:(SCManagedCapturerState *)state
{
    SCTraceStart();
    if ([self _shouldUseBracketPhotoSettingsWithCaptureState:state]) {
        return [self _bracketPhotoSettingsWithPhotoOutput:photoOutput
                                        captureConnection:captureConnection
                                             captureState:state];
    } else {
        return [self _defaultPhotoSettingsWithPhotoOutput:photoOutput captureState:state];
    }
}

- (BOOL)_shouldUseBracketPhotoSettingsWithCaptureState:(SCManagedCapturerState *)state
{
    // According to Apple documentation, AVCapturePhotoBracketSettings do not support flashMode,
    // autoStillImageStabilizationEnabled, livePhotoMovieFileURL or livePhotoMovieMetadata.
    // Besides, we only use AVCapturePhotoBracketSettings if capture settings needs to be set manually.
    return !state.flashActive && !_portraitModeCaptureEnabled &&
           (([SCManagedCaptureDevice isEnhancedNightModeSupported] && state.isNightModeActive) ||
            [_delegate managedStillImageCapturerIsUnderDeviceMotion:self]);
}

/// Standard JPEG settings: HRSI, flash, auto stabilization, and (portrait mode) depth delivery.
- (AVCapturePhotoSettings *)_defaultPhotoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput
                                                    captureState:(SCManagedCapturerState *)state
{
    SCTraceStart();
    // Specify the output file format
    AVCapturePhotoSettings *photoSettings =
        [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecJPEG}];

    // Enable HRSI if necessary
    if (photoSettings.isHighResolutionPhotoEnabled != _shouldEnableHRSI) {
        photoSettings.highResolutionPhotoEnabled = _shouldEnableHRSI;
    }

    // Turn on flash if active and supported by device
    if (state.flashActive && state.flashSupported) {
        photoSettings.flashMode = AVCaptureFlashModeOn;
    }

    // Turn on stabilization if available
    // Seems that setting autoStillImageStabilizationEnabled doesn't work during video capture session,
    // but we set enable it anyway as it is harmless.
    // NOTE(review): this condition only assigns YES when the property already reads YES, so the
    // statement is a no-op — the comment suggests the intent was an unconditional enable (or a
    // check of the output's isStillImageStabilizationSupported); confirm intended behavior.
    if (photoSettings.isAutoStillImageStabilizationEnabled) {
        photoSettings.autoStillImageStabilizationEnabled = YES;
    }

    if (_portraitModeCaptureEnabled) {
        if (@available(ios 11.0, *)) {
            photoSettings.depthDataDeliveryEnabled = YES;
        }
    }

    return photoSettings;
}

/// Bracketed settings used for manual-exposure captures (night mode / device motion).
- (AVCapturePhotoSettings *)_bracketPhotoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput
                                               captureConnection:(AVCaptureConnection *)captureConnection
                                                    captureState:(SCManagedCapturerState *)state
{
    SCTraceStart();
    OSType rawPixelFormatType = [photoOutput.availableRawPhotoPixelFormatTypes.firstObject unsignedIntValue];
    NSArray *bracketedSettings =
        [self _bracketSettingsArray:captureConnection withCaptureState:state];
    SCAssert(bracketedSettings.count <= photoOutput.maxBracketedCapturePhotoCount,
             @"Bracket photo count cannot exceed maximum count");
    // Specify the output file format and raw pixel format
    AVCapturePhotoBracketSettings *photoSettings =
        [AVCapturePhotoBracketSettings photoBracketSettingsWithRawPixelFormatType:rawPixelFormatType
                                                                  processedFormat:@{
                                                                      AVVideoCodecKey : AVVideoCodecJPEG
                                                                  }
                                                                bracketedSettings:bracketedSettings];

    // Enable HRSI if necessary
    if (photoSettings.isHighResolutionPhotoEnabled != _shouldEnableHRSI) {
        photoSettings.highResolutionPhotoEnabled = _shouldEnableHRSI;
    }

    // If lens stabilization is supported, enable the stabilization when device is moving
    if (photoOutput.isLensStabilizationDuringBracketedCaptureSupported && !photoSettings.isLensStabilizationEnabled &&
        [_delegate managedStillImageCapturerIsUnderDeviceMotion:self]) {
        photoSettings.lensStabilizationEnabled = YES;
    }
    return photoSettings;
}

/// Builds the manual-exposure bracket array (currently a single still at the device's current
/// exposure duration, lengthened for enhanced night mode).
- (NSArray *)_bracketSettingsArray:(AVCaptureConnection *)stillImageConnection
                  withCaptureState:(SCManagedCapturerState *)state
{
    NSInteger const stillCount = 1;
    NSMutableArray *bracketSettingsArray = [NSMutableArray arrayWithCapacity:stillCount];
    AVCaptureDevice *device = [stillImageConnection inputDevice];
    CMTime exposureDuration = device.exposureDuration;
    if ([SCManagedCaptureDevice isEnhancedNightModeSupported] && state.isNightModeActive) {
        exposureDuration = [self adjustedExposureDurationForNightModeWithCurrentExposureDuration:exposureDuration];
    }
    AVCaptureBracketedStillImageSettings *settings = [AVCaptureManualExposureBracketedStillImageSettings
        manualExposureSettingsWithExposureDuration:exposureDuration
                                               ISO:AVCaptureISOCurrent];
    for (NSInteger i = 0; i < stillCount; i++) {
        [bracketSettingsArray addObject:settings];
    }
    return [bracketSettingsArray copy];
}

/// Human-readable status name for logging. Exhaustive switch over the NS_ENUM — no default
/// so the compiler flags any newly added case.
- (NSString *)_photoCapturerStatusToString:(SCManagedPhotoCapturerStatus)status
{
    switch (status) {
    case SCManagedPhotoCapturerStatusPrepareToCapture:
        return @"PhotoCapturerStatusPrepareToCapture";
    case SCManagedPhotoCapturerStatusWillCapture:
        return @"PhotoCapturerStatusWillCapture";
    case SCManagedPhotoCapturerStatusDidFinishProcess:
        return @"PhotoCapturerStatusDidFinishProcess";
    }
}

@end
diff --git a/ManagedCapturer/SCManagedRecordedVideo.h b/ManagedCapturer/SCManagedRecordedVideo.h
new file mode 100644
index 0000000..c877384
--- /dev/null
+++ b/ManagedCapturer/SCManagedRecordedVideo.h
@@ -0,0 +1,36 @@
// ed265cb0c346ae35dce70d3fc12a0bd8deae0802
// Generated by the value-object.rb DO NOT EDIT!!
+ +#import + +#import +#import + +@protocol SCManagedRecordedVideo + +@property (nonatomic, copy, readonly) NSURL *videoURL; + +@property (nonatomic, copy, readonly) NSURL *rawVideoDataFileURL; + +@property (nonatomic, copy, readonly) UIImage *placeholderImage; + +@property (nonatomic, assign, readonly) BOOL isFrontFacingCamera; + +@end + +@interface SCManagedRecordedVideo : NSObject + +@property (nonatomic, copy, readonly) NSURL *videoURL; + +@property (nonatomic, copy, readonly) NSURL *rawVideoDataFileURL; + +@property (nonatomic, copy, readonly) UIImage *placeholderImage; + +@property (nonatomic, assign, readonly) BOOL isFrontFacingCamera; + +- (instancetype)initWithVideoURL:(NSURL *)videoURL + rawVideoDataFileURL:(NSURL *)rawVideoDataFileURL + placeholderImage:(UIImage *)placeholderImage + isFrontFacingCamera:(BOOL)isFrontFacingCamera; + +@end diff --git a/ManagedCapturer/SCManagedRecordedVideo.m b/ManagedCapturer/SCManagedRecordedVideo.m new file mode 100644 index 0000000..078acce --- /dev/null +++ b/ManagedCapturer/SCManagedRecordedVideo.m @@ -0,0 +1,180 @@ +// ed265cb0c346ae35dce70d3fc12a0bd8deae0802 +// Generated by the value-object.rb DO NOT EDIT!! 
+ +#import "SCManagedRecordedVideo.h" + +#import + +@implementation SCManagedRecordedVideo + +- (instancetype)initWithVideoURL:(NSURL *)videoURL + rawVideoDataFileURL:(NSURL *)rawVideoDataFileURL + placeholderImage:(UIImage *)placeholderImage + isFrontFacingCamera:(BOOL)isFrontFacingCamera +{ + self = [super init]; + if (self) { + _videoURL = [(NSObject *)videoURL copy]; + _rawVideoDataFileURL = [(NSObject *)rawVideoDataFileURL copy]; + _placeholderImage = [(NSObject *)placeholderImage copy]; + _isFrontFacingCamera = isFrontFacingCamera; + } + return self; +} + +#pragma mark - NSCopying + +- (instancetype)copyWithZone:(NSZone *)zone +{ + // Immutable object, bypass copy + return self; +} + +#pragma mark - NSCoding + +- (instancetype)initWithCoder:(NSCoder *)aDecoder +{ + self = [super init]; + if (self) { + _videoURL = [aDecoder decodeObjectForKey:@"videoURL"]; + _rawVideoDataFileURL = [aDecoder decodeObjectForKey:@"rawVideoDataFileURL"]; + _placeholderImage = [aDecoder decodeObjectForKey:@"placeholderImage"]; + _isFrontFacingCamera = [aDecoder decodeBoolForKey:@"isFrontFacingCamera"]; + } + return self; +} + +- (void)encodeWithCoder:(NSCoder *)aCoder +{ + [aCoder encodeObject:_videoURL forKey:@"videoURL"]; + [aCoder encodeObject:_rawVideoDataFileURL forKey:@"rawVideoDataFileURL"]; + [aCoder encodeObject:_placeholderImage forKey:@"placeholderImage"]; + [aCoder encodeBool:_isFrontFacingCamera forKey:@"isFrontFacingCamera"]; +} + +#pragma mark - FasterCoding + +- (BOOL)preferFasterCoding +{ + return YES; +} + +- (void)encodeWithFasterCoder:(id)fasterCoder +{ + [fasterCoder encodeBool:_isFrontFacingCamera]; + [fasterCoder encodeObject:_placeholderImage]; + [fasterCoder encodeObject:_rawVideoDataFileURL]; + [fasterCoder encodeObject:_videoURL]; +} + +- (void)decodeWithFasterDecoder:(id)fasterDecoder +{ + _isFrontFacingCamera = (BOOL)[fasterDecoder decodeBool]; + _placeholderImage = (UIImage *)[fasterDecoder decodeObject]; + _rawVideoDataFileURL = (NSURL 
*)[fasterDecoder decodeObject]; + _videoURL = (NSURL *)[fasterDecoder decodeObject]; +} + +- (void)setObject:(id)val forUInt64Key:(uint64_t)key +{ + switch (key) { + case 50783861721184594ULL: + _placeholderImage = (UIImage *)val; + break; + case 13152167848358790ULL: + _rawVideoDataFileURL = (NSURL *)val; + break; + case 48945309622713334ULL: + _videoURL = (NSURL *)val; + break; + } +} + +- (void)setBool:(BOOL)val forUInt64Key:(uint64_t)key +{ + switch (key) { + case 11924284868025312ULL: + _isFrontFacingCamera = (BOOL)val; + break; + } +} + ++ (uint64_t)fasterCodingVersion +{ + return 17435789727352013688ULL; +} + ++ (uint64_t *)fasterCodingKeys +{ + static uint64_t keys[] = { + 4 /* Total */, + FC_ENCODE_KEY_TYPE(11924284868025312, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(50783861721184594, FCEncodeTypeObject), + FC_ENCODE_KEY_TYPE(13152167848358790, FCEncodeTypeObject), + FC_ENCODE_KEY_TYPE(48945309622713334, FCEncodeTypeObject), + }; + return keys; +} + +#pragma mark - isEqual + +- (BOOL)isEqual:(id)object +{ + if (self == object) { + return YES; + } + if (![object isMemberOfClass:[self class]]) { + return NO; + } + SCManagedRecordedVideo *other = (SCManagedRecordedVideo *)object; + if (other.videoURL != _videoURL && ![(NSObject *)other.videoURL isEqual:_videoURL]) { + return NO; + } + if (other.rawVideoDataFileURL != _rawVideoDataFileURL && + ![(NSObject *)other.rawVideoDataFileURL isEqual:_rawVideoDataFileURL]) { + return NO; + } + if (other.placeholderImage != _placeholderImage && + ![(NSObject *)other.placeholderImage isEqual:_placeholderImage]) { + return NO; + } + if (other.isFrontFacingCamera != _isFrontFacingCamera) { + return NO; + } + return YES; +} + +- (NSUInteger)hash +{ + NSUInteger subhashes[] = {[_videoURL hash], [_rawVideoDataFileURL hash], [_placeholderImage hash], + (NSUInteger)_isFrontFacingCamera}; + NSUInteger result = subhashes[0]; + for (int i = 1; i < 4; i++) { + unsigned long long base = (((unsigned long long)result) << 32 | 
subhashes[i]); + base = (~base) + (base << 18); + base ^= (base >> 31); + base *= 21; + base ^= (base >> 11); + base += (base << 6); + base ^= (base >> 22); + result = (NSUInteger)base; + } + return result; +} + +#pragma mark - Print description in console: lldb> po #{variable name} + +- (NSString *)description +{ + NSMutableString *desc = [NSMutableString string]; + [desc appendString:@"{\n"]; + [desc appendFormat:@"\tvideoURL:%@\n", [_videoURL description]]; + [desc appendFormat:@"\trawVideoDataFileURL:%@\n", [_rawVideoDataFileURL description]]; + [desc appendFormat:@"\tplaceholderImage:%@\n", [_placeholderImage description]]; + [desc appendFormat:@"\tisFrontFacingCamera:%@\n", [@(_isFrontFacingCamera) description]]; + [desc appendString:@"}\n"]; + + return [desc copy]; +} + +@end diff --git a/ManagedCapturer/SCManagedRecordedVideo.value b/ManagedCapturer/SCManagedRecordedVideo.value new file mode 100644 index 0000000..78bfd3c --- /dev/null +++ b/ManagedCapturer/SCManagedRecordedVideo.value @@ -0,0 +1,6 @@ +interface SCManagedRecordedVideo + NSURL *videoURL; + NSURL *rawVideoDataFileURL; + UIImage *placeholderImage; + BOOL isFrontFacingCamera; +end \ No newline at end of file diff --git a/ManagedCapturer/SCManagedStillImageCapturer.h b/ManagedCapturer/SCManagedStillImageCapturer.h new file mode 100644 index 0000000..0d62afd --- /dev/null +++ b/ManagedCapturer/SCManagedStillImageCapturer.h @@ -0,0 +1,92 @@ +// +// SCManagedStillImageCapturer.h +// Snapchat +// +// Created by Liu Liu on 4/30/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. 
//

#import "SCCoreCameraLogger.h"
#import "SCManagedCaptureDevice.h"
#import "SCManagedCapturerListener.h"
#import "SCManagedCapturerState.h"
#import "SCManagedDeviceCapacityAnalyzerListener.h"

#import
#import

#import
#import

SC_EXTERN_C_BEGIN

// Returns YES when the modern AVCapturePhotoOutput-based capturer should be used
// (the factory below falls back to the legacy capturer otherwise).
extern BOOL SCPhotoCapturerIsEnabled(void);

SC_EXTERN_C_END

@protocol SCPerforming;
@protocol SCManagedStillImageCapturerDelegate;
@class SCCaptureResource;

// Completion for a still image capture: exactly one of fullScreenImage/error is expected to be
// meaningful; metadata carries EXIF/camera info when available.
typedef void (^sc_managed_still_image_capturer_capture_still_image_completion_handler_t)(UIImage *fullScreenImage,
                                                                                         NSDictionary *metadata,
                                                                                         NSError *error);

/*
 Abstract base class for still image capture. Concrete subclasses (photo / legacy) override the
 session and capture methods; the protected ivars below hold per-capture parameters shared with
 subclasses. NOTE(review): instances are expected to be driven from the capture resource's queue
 performer — confirm before calling from elsewhere.
 */
@interface SCManagedStillImageCapturer
    : NSObject {
    SCManagedCapturerState *_state;
    BOOL _shouldCaptureFromVideo;
    BOOL _captureImageFromVideoImmediately;
    CGFloat _aspectRatio;
    float _zoomFactor;
    float _fieldOfView;
    BOOL _adjustingExposureManualDetect;
    sc_managed_still_image_capturer_capture_still_image_completion_handler_t _completionHandler;
}

/// Factory: returns the photo-output capturer when enabled, the legacy capturer otherwise.
+ (instancetype)capturerWithCaptureResource:(SCCaptureResource *)captureResource;

SC_INIT_AND_NEW_UNAVAILABLE;

@property (nonatomic, weak) id delegate;

/// Creates the capture output and attaches it to the session.
- (void)setupWithSession:(AVCaptureSession *)session;

/// Adds this capturer's output to the session (no-op if the session rejects it).
- (void)setAsOutput:(AVCaptureSession *)session;

/// Removes this capturer's output from the session.
- (void)removeAsOutput:(AVCaptureSession *)session;

/// Enables/disables high-resolution still image output for subsequent captures.
- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled;

/// Enables/disables portrait (depth-blur) capture where supported.
- (void)setPortraitModeCaptureEnabled:(BOOL)enabled;

/// Focus point for the portrait-mode depth blur, in normalized coordinates.
- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest;

/// Requests still image stabilization for subsequent captures (subclass-specific).
- (void)enableStillImageStabilization;

/// Captures a still image with the given parameters; completionHandler is called exactly once.
- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio
                            atZoomFactor:(float)zoomFactor
                             fieldOfView:(float)fieldOfView
                                   state:(SCManagedCapturerState *)state
                        captureSessionID:(NSString *)captureSessionID
                  shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
                       completionHandler:
                           (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler;

/// Captures the still image by grabbing a frame from the live video buffer instead of the
/// photo output.
- (void)captureStillImageFromVideoBuffer;

@end

// Delegate consulted by the capturer for capture-strategy decisions and lifecycle events.
@protocol SCManagedStillImageCapturerDelegate

/// YES while the device is moving; used to decide on bracketed capture / lens stabilization.
- (BOOL)managedStillImageCapturerIsUnderDeviceMotion:(SCManagedStillImageCapturer *)managedStillImageCapturer;

/// YES when the capture should go through the video-file input method instead of the photo output.
- (BOOL)managedStillImageCapturerShouldProcessFileInput:(SCManagedStillImageCapturer *)managedStillImageCapturer;

@optional

/// Called just before the shutter fires.
- (void)managedStillImageCapturerWillCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer;

/// Called right after the photo has been captured.
- (void)managedStillImageCapturerDidCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer;

@end
diff --git a/ManagedCapturer/SCManagedStillImageCapturer.mm b/ManagedCapturer/SCManagedStillImageCapturer.mm
new file mode 100644
index 0000000..91c55de
--- /dev/null
+++ b/ManagedCapturer/SCManagedStillImageCapturer.mm
@@ -0,0 +1,399 @@
//
//  SCManagedStillImageCapturer.m
//  Snapchat
//
//  Created by Liu Liu on 4/30/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCManagedStillImageCapturer.h"

#import "SCCameraSettingUtils.h"
#import "SCCameraTweaks.h"
#import "SCCaptureResource.h"
#import "SCLogger+Camera.h"
#import "SCManagedCaptureSession.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerLensAPI.h"
#import "SCManagedFrameHealthChecker.h"
#import "SCManagedLegacyStillImageCapturer.h"
#import "SCManagedPhotoCapturer.h"
#import "SCManagedStillImageCapturerHandler.h"
#import "SCManagedStillImageCapturer_Protected.h"

#import
#import
#import
#import
#import
#import
#import
#import
#import

#import

NSString *const kSCManagedStillImageCapturerErrorDomain = @"kSCManagedStillImageCapturerErrorDomain";

// System shutter sound ID played on capture.
NSInteger const kSCCameraShutterSoundID = 1108;

// Error codes for kSCManagedStillImageCapturerErrorDomain.
#if !TARGET_IPHONE_SIMULATOR
NSInteger const kSCManagedStillImageCapturerNoStillImageConnection = 1101;
#endif
NSInteger const kSCManagedStillImageCapturerApplicationStateBackground = 1102;

// We will do the image capture regardless if there is still camera adjustment in progress after 0.4 seconds.
+NSTimeInterval const kSCManagedStillImageCapturerDeadline = 0.4; +NSTimeInterval const kSCCameraRetryInterval = 0.1; + +BOOL SCPhotoCapturerIsEnabled(void) +{ + // Due to the native crash in https://jira.sc-corp.net/browse/CCAM-4904, we guard it >= 10.2 + return SC_AT_LEAST_IOS_10_2; +} + +NSDictionary *cameraInfoForBuffer(CMSampleBufferRef imageDataSampleBuffer) +{ + CFDictionaryRef exifAttachments = + (CFDictionaryRef)CMGetAttachment(imageDataSampleBuffer, kCGImagePropertyExifDictionary, NULL); + float brightness = [retrieveBrightnessFromEXIFAttachments(exifAttachments) floatValue]; + NSInteger ISOSpeedRating = [retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments) integerValue]; + return @{ + (__bridge NSString *) kCGImagePropertyExifISOSpeedRatings : @(ISOSpeedRating), (__bridge NSString *) + kCGImagePropertyExifBrightnessValue : @(brightness) + }; +} + +@implementation SCManagedStillImageCapturer + ++ (instancetype)capturerWithCaptureResource:(SCCaptureResource *)captureResource +{ + if (SCPhotoCapturerIsEnabled()) { + return [[SCManagedPhotoCapturer alloc] initWithSession:captureResource.managedSession.avSession + performer:captureResource.queuePerformer + lensProcessingCore:captureResource.lensProcessingCore + delegate:captureResource.stillImageCapturerHandler]; + } else { + return [[SCManagedLegacyStillImageCapturer alloc] initWithSession:captureResource.managedSession.avSession + performer:captureResource.queuePerformer + lensProcessingCore:captureResource.lensProcessingCore + delegate:captureResource.stillImageCapturerHandler]; + } +} + +- (instancetype)initWithSession:(AVCaptureSession *)session + performer:(id)performer + lensProcessingCore:(id)lensAPI + delegate:(id)delegate +{ + self = [super init]; + if (self) { + _session = session; + _performer = performer; + _lensAPI = lensAPI; + _delegate = delegate; + } + return self; +} + +- (void)setupWithSession:(AVCaptureSession *)session +{ + UNIMPLEMENTED_METHOD; +} + +- 
(void)setAsOutput:(AVCaptureSession *)session +{ + UNIMPLEMENTED_METHOD; +} + +- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled +{ + UNIMPLEMENTED_METHOD; +} + +- (void)enableStillImageStabilization +{ + UNIMPLEMENTED_METHOD; +} + +- (void)removeAsOutput:(AVCaptureSession *)session +{ + UNIMPLEMENTED_METHOD; +} + +- (void)setPortraitModeCaptureEnabled:(BOOL)enabled +{ + UNIMPLEMENTED_METHOD; +} + +- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest +{ + UNIMPLEMENTED_METHOD; +} + +- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio + atZoomFactor:(float)zoomFactor + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state + captureSessionID:(NSString *)captureSessionID + shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo + completionHandler: + (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler +{ + UNIMPLEMENTED_METHOD; +} + +#pragma mark - SCManagedDeviceCapacityAnalyzerListener + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeAdjustingExposure:(BOOL)adjustingExposure +{ + UNIMPLEMENTED_METHOD; +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition +{ + UNIMPLEMENTED_METHOD; +} + +#pragma mark - SCManagedCapturerListener + +- (void)managedCapturer:(id)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state +{ + UNIMPLEMENTED_METHOD; +} + +- (UIImage *)imageFromData:(NSData *)data + currentZoomFactor:(float)currentZoomFactor + targetAspectRatio:(CGFloat)targetAspectRatio + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state + sampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + UIImage *capturedImage = [self imageFromImage:[UIImage sc_imageWithData:data] + currentZoomFactor:currentZoomFactor + 
targetAspectRatio:targetAspectRatio + fieldOfView:fieldOfView + state:state]; + // Check capture frame health before showing preview + NSDictionary *metadata = + [[SCManagedFrameHealthChecker sharedInstance] metadataForSampleBuffer:sampleBuffer + photoCapturerEnabled:SCPhotoCapturerIsEnabled() + lensEnabled:state.lensesActive + lensID:[_lensAPI activeLensId]]; + [[SCManagedFrameHealthChecker sharedInstance] checkImageHealthForCaptureFrameImage:capturedImage + captureSettings:metadata + captureSessionID:_captureSessionID]; + _captureSessionID = nil; + return capturedImage; +} + +- (UIImage *)imageFromData:(NSData *)data + currentZoomFactor:(float)currentZoomFactor + targetAspectRatio:(CGFloat)targetAspectRatio + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state + metadata:(NSDictionary *)metadata +{ + UIImage *capturedImage = [self imageFromImage:[UIImage sc_imageWithData:data] + currentZoomFactor:currentZoomFactor + targetAspectRatio:targetAspectRatio + fieldOfView:fieldOfView + state:state]; + // Check capture frame health before showing preview + NSDictionary *newMetadata = + [[SCManagedFrameHealthChecker sharedInstance] metadataForMetadata:metadata + photoCapturerEnabled:SCPhotoCapturerIsEnabled() + lensEnabled:state.lensesActive + lensID:[_lensAPI activeLensId]]; + [[SCManagedFrameHealthChecker sharedInstance] checkImageHealthForCaptureFrameImage:capturedImage + captureSettings:newMetadata + captureSessionID:_captureSessionID]; + _captureSessionID = nil; + return capturedImage; +} + +- (UIImage *)imageFromImage:(UIImage *)image + currentZoomFactor:(float)currentZoomFactor + targetAspectRatio:(CGFloat)targetAspectRatio + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state +{ + UIImage *fullScreenImage = image; + if (state.lensesActive && _lensAPI.isLensApplied) { + fullScreenImage = [_lensAPI processImage:fullScreenImage + maxPixelSize:[_lensAPI maxPixelSize] + devicePosition:state.devicePosition + 
fieldOfView:fieldOfView]; + } + // Resize and crop + return [self resizeImage:fullScreenImage currentZoomFactor:currentZoomFactor targetAspectRatio:targetAspectRatio]; +} + +- (UIImage *)resizeImage:(UIImage *)image + currentZoomFactor:(float)currentZoomFactor + targetAspectRatio:(CGFloat)targetAspectRatio +{ + SCTraceStart(); + if (currentZoomFactor == 1) { + return SCCropImageToTargetAspectRatio(image, targetAspectRatio); + } else { + @autoreleasepool { + return [self resizeImageUsingCG:image + currentZoomFactor:currentZoomFactor + targetAspectRatio:targetAspectRatio + maxPixelSize:[_lensAPI maxPixelSize]]; + } + } +} + +- (UIImage *)resizeImageUsingCG:(UIImage *)inputImage + currentZoomFactor:(float)currentZoomFactor + targetAspectRatio:(CGFloat)targetAspectRatio + maxPixelSize:(CGFloat)maxPixelSize +{ + size_t imageWidth = CGImageGetWidth(inputImage.CGImage); + size_t imageHeight = CGImageGetHeight(inputImage.CGImage); + SCLogGeneralInfo(@"Captured still image at %dx%d", (int)imageWidth, (int)imageHeight); + size_t targetWidth, targetHeight; + float zoomFactor = currentZoomFactor; + if (imageWidth > imageHeight) { + targetWidth = maxPixelSize; + targetHeight = (maxPixelSize * imageHeight + imageWidth / 2) / imageWidth; + // Update zoom factor here + zoomFactor *= (float)maxPixelSize / imageWidth; + } else { + targetHeight = maxPixelSize; + targetWidth = (maxPixelSize * imageWidth + imageHeight / 2) / imageHeight; + zoomFactor *= (float)maxPixelSize / imageHeight; + } + if (targetAspectRatio != kSCManagedCapturerAspectRatioUnspecified) { + SCCropImageSizeToAspectRatio(targetWidth, targetHeight, inputImage.imageOrientation, targetAspectRatio, + &targetWidth, &targetHeight); + } + CGContextRef context = + CGBitmapContextCreate(NULL, targetWidth, targetHeight, CGImageGetBitsPerComponent(inputImage.CGImage), + CGImageGetBitsPerPixel(inputImage.CGImage) * targetWidth / 8, + CGImageGetColorSpace(inputImage.CGImage), CGImageGetBitmapInfo(inputImage.CGImage)); + 
CGContextSetInterpolationQuality(context, kCGInterpolationHigh); + CGContextDrawImage(context, CGRectMake(targetWidth * 0.5 - imageWidth * 0.5 * zoomFactor, + targetHeight * 0.5 - imageHeight * 0.5 * zoomFactor, imageWidth * zoomFactor, + imageHeight * zoomFactor), + inputImage.CGImage); + CGImageRef thumbnail = CGBitmapContextCreateImage(context); + CGContextRelease(context); + UIImage *image = + [UIImage imageWithCGImage:thumbnail scale:inputImage.scale orientation:inputImage.imageOrientation]; + CGImageRelease(thumbnail); + return image; +} + +- (CMTime)adjustedExposureDurationForNightModeWithCurrentExposureDuration:(CMTime)exposureDuration +{ + CMTime adjustedExposureDuration = exposureDuration; + if (_lightingConditionType == SCCapturerLightingConditionTypeDark) { + adjustedExposureDuration = CMTimeMultiplyByFloat64(exposureDuration, 1.5); + } else if (_lightingConditionType == SCCapturerLightingConditionTypeExtremeDark) { + adjustedExposureDuration = CMTimeMultiplyByFloat64(exposureDuration, 2.5); + } + return adjustedExposureDuration; +} + +#pragma mark - SCManagedVideoDataSourceListener + +- (void)managedVideoDataSource:(id)managedVideoDataSource + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + SCTraceStart(); + SC_GUARD_ELSE_RETURN(_captureImageFromVideoImmediately); + _captureImageFromVideoImmediately = NO; + @weakify(self); + CFRetain(sampleBuffer); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + [self _didCapturePhotoFromVideoBuffer]; + UIImageOrientation orientation = devicePosition == SCManagedCaptureDevicePositionBack + ? 
UIImageOrientationRight + : UIImageOrientationLeftMirrored; + UIImage *videoImage = [UIImage imageWithPixelBufferRef:CMSampleBufferGetImageBuffer(sampleBuffer) + backingType:UIImageBackingTypeCGImage + orientation:orientation + context:[CIContext contextWithOptions:nil]]; + UIImage *fullScreenImage = [self imageFromImage:videoImage + currentZoomFactor:_zoomFactor + targetAspectRatio:_aspectRatio + fieldOfView:_fieldOfView + state:_state]; + NSMutableDictionary *cameraInfo = [cameraInfoForBuffer(sampleBuffer) mutableCopy]; + cameraInfo[@"capture_image_from_video_buffer"] = @"enabled"; + [self _didFinishProcessingFromVideoBufferWithImage:fullScreenImage cameraInfo:cameraInfo]; + CFRelease(sampleBuffer); + }]; +} + +- (void)_willBeginCapturePhotoFromVideoBuffer +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerWillCapturePhoto:)]) { + [self->_delegate managedStillImageCapturerWillCapturePhoto:self]; + } + }]; +} + +- (void)_didCapturePhotoFromVideoBuffer +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerDidCapturePhoto:)]) { + [self->_delegate managedStillImageCapturerDidCapturePhoto:self]; + } + }]; +} + +- (void)_didFinishProcessingFromVideoBufferWithImage:(UIImage *)image cameraInfo:(NSDictionary *)cameraInfo +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()]; + [[SCCoreCameraLogger sharedInstance] + logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()]; + 
sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler; + _completionHandler = nil; + if (completionHandler) { + completionHandler(image, cameraInfo, nil); + } + }]; +} + +- (void)captureStillImageFromVideoBuffer +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + AudioServicesPlaySystemSoundWithCompletion(kSCCameraShutterSoundID, nil); + [self _willBeginCapturePhotoFromVideoBuffer]; + self->_captureImageFromVideoImmediately = YES; + }]; +} + +@end diff --git a/ManagedCapturer/SCManagedStillImageCapturerHandler.h b/ManagedCapturer/SCManagedStillImageCapturerHandler.h new file mode 100644 index 0000000..7535483 --- /dev/null +++ b/ManagedCapturer/SCManagedStillImageCapturerHandler.h @@ -0,0 +1,21 @@ +// +// SCManagedStillImageCapturerHandler.h +// Snapchat +// +// Created by Jingtian Yang on 11/12/2017. +// + +#import "SCManagedStillImageCapturer.h" + +#import + +@class SCCaptureResource; +@protocol SCDeviceMotionProvider +, SCFileInputDecider; + +@interface SCManagedStillImageCapturerHandler : NSObject + +SC_INIT_AND_NEW_UNAVAILABLE +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource; + +@end diff --git a/ManagedCapturer/SCManagedStillImageCapturerHandler.m b/ManagedCapturer/SCManagedStillImageCapturerHandler.m new file mode 100644 index 0000000..0b39565 --- /dev/null +++ b/ManagedCapturer/SCManagedStillImageCapturerHandler.m @@ -0,0 +1,85 @@ +// +// SCManagedStillImageCapturerHandler.m +// Snapchat +// +// Created by Jingtian Yang on 11/12/2017. 
+// + +#import "SCManagedStillImageCapturerHandler.h" + +#import "SCCaptureResource.h" +#import "SCManagedCaptureDevice+SCManagedCapturer.h" +#import "SCManagedCapturer.h" +#import "SCManagedCapturerLogging.h" +#import "SCManagedCapturerSampleMetadata.h" +#import "SCManagedCapturerState.h" + +#import +#import +#import +#import + +@interface SCManagedStillImageCapturerHandler () { + __weak SCCaptureResource *_captureResource; +} + +@end + +@implementation SCManagedStillImageCapturerHandler + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource +{ + self = [super init]; + if (self) { + SCAssert(captureResource, @""); + _captureResource = captureResource; + } + return self; +} + +- (void)managedStillImageCapturerWillCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Will capture photo. stillImageCapturer:%@", _captureResource.stillImageCapturer); + [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + if (_captureResource.stillImageCapturer) { + SCManagedCapturerState *state = [_captureResource.state copy]; + SCManagedCapturerSampleMetadata *sampleMetadata = [[SCManagedCapturerSampleMetadata alloc] + initWithPresentationTimestamp:kCMTimeZero + fieldOfView:_captureResource.device.fieldOfView]; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + willCapturePhoto:state + sampleMetadata:sampleMetadata]; + }); + } + }]; +} + +- (void)managedStillImageCapturerDidCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Did capture photo. 
stillImageCapturer:%@", _captureResource.stillImageCapturer); + [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + if (_captureResource.stillImageCapturer) { + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didCapturePhoto:state]; + }); + } + }]; +} + +- (BOOL)managedStillImageCapturerIsUnderDeviceMotion:(SCManagedStillImageCapturer *)managedStillImageCapturer +{ + return _captureResource.deviceMotionProvider.isUnderDeviceMotion; +} + +- (BOOL)managedStillImageCapturerShouldProcessFileInput:(SCManagedStillImageCapturer *)managedStillImageCapturer +{ + return _captureResource.fileInputDecider.shouldProcessFileInput; +} + +@end diff --git a/ManagedCapturer/SCManagedStillImageCapturer_Protected.h b/ManagedCapturer/SCManagedStillImageCapturer_Protected.h new file mode 100644 index 0000000..30fe409 --- /dev/null +++ b/ManagedCapturer/SCManagedStillImageCapturer_Protected.h @@ -0,0 +1,63 @@ +// +// SCManagedStillImageCapturer_Protected.h +// Snapchat +// +// Created by Chao Pang on 10/4/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. +// + +SC_EXTERN_C_BEGIN +extern NSDictionary *cameraInfoForBuffer(CMSampleBufferRef imageDataSampleBuffer); +SC_EXTERN_C_END + +extern NSString *const kSCManagedStillImageCapturerErrorDomain; + +#if !TARGET_IPHONE_SIMULATOR +extern NSInteger const kSCManagedStillImageCapturerNoStillImageConnection; +#endif +extern NSInteger const kSCManagedStillImageCapturerApplicationStateBackground; + +// We will do the image capture regardless if these is still camera adjustment in progress after 0.4 seconds. 
+extern NSTimeInterval const kSCManagedStillImageCapturerDeadline; +extern NSTimeInterval const kSCCameraRetryInterval; + +@protocol SCManagedCapturerLensAPI; + +@interface SCManagedStillImageCapturer () { + @protected + id _lensAPI; + id _performer; + AVCaptureSession *_session; + id __weak _delegate; + NSString *_captureSessionID; + SCCapturerLightingConditionType _lightingConditionType; +} + +- (instancetype)initWithSession:(AVCaptureSession *)session + performer:(id)performer + lensProcessingCore:(id)lensProcessingCore + delegate:(id)delegate; + +- (UIImage *)imageFromData:(NSData *)data + currentZoomFactor:(float)currentZoomFactor + targetAspectRatio:(CGFloat)targetAspectRatio + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state + sampleBuffer:(CMSampleBufferRef)sampleBuffer; + +- (UIImage *)imageFromData:(NSData *)data + currentZoomFactor:(float)currentZoomFactor + targetAspectRatio:(CGFloat)targetAspectRatio + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state + metadata:(NSDictionary *)metadata; + +- (UIImage *)imageFromImage:(UIImage *)image + currentZoomFactor:(float)currentZoomFactor + targetAspectRatio:(CGFloat)targetAspectRatio + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state; + +- (CMTime)adjustedExposureDurationForNightModeWithCurrentExposureDuration:(CMTime)exposureDuration; + +@end diff --git a/ManagedCapturer/SCManagedVideoARDataSource.h b/ManagedCapturer/SCManagedVideoARDataSource.h new file mode 100644 index 0000000..b5486c3 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoARDataSource.h @@ -0,0 +1,24 @@ +// +// SCManagedVideoARDataSource.h +// Snapchat +// +// Created by Eyal Segal on 20/10/2017. 
+// + +#import "SCCapturerDefines.h" + +#import + +#import + +@protocol SCManagedVideoARDataSource + +@property (atomic, strong) ARFrame *currentFrame NS_AVAILABLE_IOS(11_0); + +#ifdef SC_USE_ARKIT_FACE +@property (atomic, strong) AVDepthData *lastDepthData NS_AVAILABLE_IOS(11_0); +#endif + +@property (atomic, assign) float fieldOfView NS_AVAILABLE_IOS(11_0); + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturer.h b/ManagedCapturer/SCManagedVideoCapturer.h new file mode 100644 index 0000000..1a4a16e --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturer.h @@ -0,0 +1,102 @@ +// +// SCManagedVideoCapturer.h +// Snapchat +// +// Created by Liu Liu on 5/1/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. +// + +#import "SCManagedRecordedVideo.h" +#import "SCManagedVideoCapturerOutputSettings.h" +#import "SCVideoCaptureSessionInfo.h" + +#import +#import +#import + +#import +#import + +typedef void (^sc_managed_video_capturer_recording_completion_handler_t)(NSURL *fileURL, NSError *error); + +@class SCManagedVideoCapturer, SCTimedTask; + +@protocol SCManagedVideoCapturerDelegate + +// All these callbacks are invoked on a private queue for video recording channels + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didBeginVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo; + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didBeginAudioRecording:(SCVideoCaptureSessionInfo)sessionInfo; + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + willStopWithRecordedVideoFuture:(SCFuture> *)videoProviderFuture + videoSize:(CGSize)videoSize + placeholderImage:(UIImage *)placeholderImage + session:(SCVideoCaptureSessionInfo)sessionInfo; + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didSucceedWithRecordedVideo:(SCManagedRecordedVideo *)recordedVideo + session:(SCVideoCaptureSessionInfo)sessionInfo; + +- 
(void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didFailWithError:(NSError *)error + session:(SCVideoCaptureSessionInfo)sessionInfo; + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didCancelVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo; + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didGetError:(NSError *)error + forType:(SCManagedVideoCapturerInfoType)type + session:(SCVideoCaptureSessionInfo)sessionInfo; + +- (NSDictionary *)managedVideoCapturerGetExtraFrameHealthInfo:(SCManagedVideoCapturer *)managedVideoCapturer; + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer + presentationTimestamp:(CMTime)presentationTimestamp; + +@end + +/** + * AVFoundation backed class that writes frames to an output file. SCManagedVideoCapturer + * uses SCManagedVideoCapturerOutputSettings to determine output settings. If no output + * settings are passed in (nil) SCManagedVideoCapturer will fall back on default settings. 
+ */ +@interface SCManagedVideoCapturer : NSObject + +/** + * Return the output URL that passed into beginRecordingToURL method + */ +@property (nonatomic, copy, readonly) NSURL *outputURL; + +@property (nonatomic, weak) id delegate; +@property (nonatomic, readonly) SCVideoCaptureSessionInfo activeSession; +@property (nonatomic, assign, readonly) CMTime firstWrittenAudioBufferDelay; +@property (nonatomic, assign, readonly) BOOL audioQueueStarted; + +- (instancetype)initWithQueuePerformer:(SCQueuePerformer *)queuePerformer; + +- (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration; +- (SCVideoCaptureSessionInfo)startRecordingAsynchronouslyWithOutputSettings: + (SCManagedVideoCapturerOutputSettings *)outputSettings + audioConfiguration:(SCAudioConfiguration *)audioConfiguration + maxDuration:(NSTimeInterval)maxDuration + toURL:(NSURL *)URL + deviceFormat:(AVCaptureDeviceFormat *)deviceFormat + orientation:(AVCaptureVideoOrientation)videoOrientation + captureSessionID:(NSString *)captureSessionID; + +- (void)stopRecordingAsynchronously; +- (void)cancelRecordingAsynchronously; + +// Schedule a task to run, it is thread safe. +- (void)addTimedTask:(SCTimedTask *)task; + +// Clear all tasks, it is thread safe. +- (void)clearTimedTasks; + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturer.m b/ManagedCapturer/SCManagedVideoCapturer.m new file mode 100644 index 0000000..60f2002 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturer.m @@ -0,0 +1,1107 @@ +// +// SCManagedVideoCapturer.m +// Snapchat +// +// Created by Liu Liu on 5/1/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. 
+// + +#import "SCManagedVideoCapturer.h" + +#import "NSURL+Asset.h" +#import "SCAudioCaptureSession.h" +#import "SCCameraTweaks.h" +#import "SCCapturerBufferedVideoWriter.h" +#import "SCCoreCameraLogger.h" +#import "SCLogger+Camera.h" +#import "SCManagedCapturer.h" +#import "SCManagedFrameHealthChecker.h" +#import "SCManagedVideoCapturerLogger.h" +#import "SCManagedVideoCapturerTimeObserver.h" + +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import + +#import + +@import CoreMedia; +@import ImageIO; + +static NSString *const kSCAudioCaptureAudioSessionLabel = @"CAMERA"; + +// wild card audio queue error code +static NSInteger const kSCAudioQueueErrorWildCard = -50; +// kAudioHardwareIllegalOperationError, it means hardware failure +static NSInteger const kSCAudioQueueErrorHardware = 1852797029; + +typedef NS_ENUM(NSUInteger, SCManagedVideoCapturerStatus) { + SCManagedVideoCapturerStatusUnknown, + SCManagedVideoCapturerStatusIdle, + SCManagedVideoCapturerStatusPrepareToRecord, + SCManagedVideoCapturerStatusReadyForRecording, + SCManagedVideoCapturerStatusRecording, + SCManagedVideoCapturerStatusError, +}; + +#define SCLogVideoCapturerInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCManagedVideoCapturer] " fmt, ##__VA_ARGS__) +#define SCLogVideoCapturerWarning(fmt, ...) SCLogCoreCameraWarning(@"[SCManagedVideoCapturer] " fmt, ##__VA_ARGS__) +#define SCLogVideoCapturerError(fmt, ...) 
SCLogCoreCameraError(@"[SCManagedVideoCapturer] " fmt, ##__VA_ARGS__) + +@interface SCManagedVideoCapturer () +// This value has to be atomic because it is read on a different thread (write +// on output queue, as always) +@property (atomic, assign, readwrite) SCManagedVideoCapturerStatus status; + +@property (nonatomic, assign) CMTime firstWrittenAudioBufferDelay; + +@end + +static char *const kSCManagedVideoCapturerQueueLabel = "com.snapchat.managed-video-capturer-queue"; +static char *const kSCManagedVideoCapturerPromiseQueueLabel = "com.snapchat.video-capture-promise"; + +static NSString *const kSCManagedVideoCapturerErrorDomain = @"kSCManagedVideoCapturerErrorDomain"; + +static NSInteger const kSCManagedVideoCapturerCannotAddAudioVideoInput = 1001; +static NSInteger const kSCManagedVideoCapturerEmptyFrame = 1002; +static NSInteger const kSCManagedVideoCapturerStopBeforeStart = 1003; +static NSInteger const kSCManagedVideoCapturerStopWithoutStart = 1004; +static NSInteger const kSCManagedVideoCapturerZeroVideoSize = -111; + +static NSUInteger const kSCVideoContentComplexitySamplingRate = 90; + +// This is the maximum time we will wait for the Recording Capturer pipeline to drain +// When video stabilization is turned on the extra frame delay is around 20 frames. 
+// @30 fps this is 0.66 seconds +static NSTimeInterval const kSCManagedVideoCapturerStopRecordingDeadline = 1.0; + +static const char *SCPlaceholderImageGenerationQueueLabel = "com.snapchat.video-capturer-placeholder-queue"; + +static const char *SCVideoRecordingPreparationQueueLabel = "com.snapchat.video-recording-preparation-queue"; + +static dispatch_queue_t SCPlaceholderImageGenerationQueue(void) +{ + static dispatch_queue_t queue; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + queue = dispatch_queue_create(SCPlaceholderImageGenerationQueueLabel, DISPATCH_QUEUE_SERIAL); + }); + return queue; +} + +@interface SCManagedVideoCapturer () + +@end + +@implementation SCManagedVideoCapturer { + NSTimeInterval _maxDuration; + NSTimeInterval _recordStartTime; + + SCCapturerBufferedVideoWriter *_videoWriter; + + BOOL _hasWritten; + SCQueuePerformer *_performer; + SCQueuePerformer *_videoPreparationPerformer; + SCAudioCaptureSession *_audioCaptureSession; + NSError *_lastError; + UIImage *_placeholderImage; + + // For logging purpose + BOOL _isVideoSnap; + NSDictionary *_videoOutputSettings; + + // The following value is used to control the encoder shutdown following a stop recording message. + // When a shutdown is requested this value will be the timestamp of the last captured frame. + CFTimeInterval _stopTime; + NSInteger _stopSession; + SCAudioConfigurationToken *_preparedAudioConfiguration; + SCAudioConfigurationToken *_audioConfiguration; + + dispatch_semaphore_t _startRecordingSemaphore; + + // For store the raw frame datas + NSInteger _rawDataFrameNum; + NSURL *_rawDataURL; + SCVideoFrameRawDataCollector *_videoFrameRawDataCollector; + + CMTime _startSessionTime; + // Indicates how actual processing time of first frame. Also used for camera timer animation start offset. 
+ NSTimeInterval _startSessionRealTime; + CMTime _endSessionTime; + sc_managed_capturer_recording_session_t _sessionId; + + SCManagedVideoCapturerTimeObserver *_timeObserver; + SCManagedVideoCapturerLogger *_capturerLogger; + + CGSize _outputSize; + BOOL _isFrontFacingCamera; + SCPromise> *_recordedVideoPromise; + SCManagedAudioDataSourceListenerAnnouncer *_announcer; + + NSString *_captureSessionID; + CIContext *_ciContext; +} + +@synthesize performer = _performer; + +- (instancetype)init +{ + SCTraceStart(); + return [self initWithQueuePerformer:[[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoCapturerQueueLabel + qualityOfService:QOS_CLASS_USER_INTERACTIVE + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCamera]]; +} + +- (instancetype)initWithQueuePerformer:(SCQueuePerformer *)queuePerformer +{ + SCTraceStart(); + self = [super init]; + if (self) { + _performer = queuePerformer; + _audioCaptureSession = [[SCAudioCaptureSession alloc] init]; + _audioCaptureSession.delegate = self; + _announcer = [SCManagedAudioDataSourceListenerAnnouncer new]; + self.status = SCManagedVideoCapturerStatusIdle; + _capturerLogger = [[SCManagedVideoCapturerLogger alloc] init]; + _startRecordingSemaphore = dispatch_semaphore_create(0); + } + return self; +} + +- (void)dealloc +{ + SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo before dealloc: %@", + SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession)); +} + +- (SCVideoCaptureSessionInfo)activeSession +{ + return SCVideoCaptureSessionInfoMake(_startSessionTime, _endSessionTime, _sessionId); +} + +- (CGSize)defaultSizeForDeviceFormat:(AVCaptureDeviceFormat *)format +{ + SCTraceStart(); + // if there is no device, and no format + if (format == nil) { + // hard code 720p + return CGSizeMake(kSCManagedCapturerDefaultVideoActiveFormatWidth, + kSCManagedCapturerDefaultVideoActiveFormatHeight); + } + CMVideoDimensions videoDimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription); + 
CGSize size = CGSizeMake(videoDimensions.width, videoDimensions.height); + if (videoDimensions.width > kSCManagedCapturerDefaultVideoActiveFormatWidth && + videoDimensions.height > kSCManagedCapturerDefaultVideoActiveFormatHeight) { + CGFloat scaleFactor = MAX((kSCManagedCapturerDefaultVideoActiveFormatWidth / videoDimensions.width), + (kSCManagedCapturerDefaultVideoActiveFormatHeight / videoDimensions.height)); + size = SCSizeMakeAlignTo(SCSizeApplyScale(size, scaleFactor), 2); + } + if ([SCDeviceName isIphoneX]) { + size = SCSizeApplyScale(size, kSCIPhoneXCapturedImageVideoCropRatio); + } + return size; +} + +- (CGSize)cropSize:(CGSize)size toAspectRatio:(CGFloat)aspectRatio +{ + if (aspectRatio == kSCManagedCapturerAspectRatioUnspecified) { + return size; + } + // video input is always in landscape mode + aspectRatio = 1.0 / aspectRatio; + if (size.width > size.height * aspectRatio) { + size.width = size.height * aspectRatio; + } else { + size.height = size.width / aspectRatio; + } + return CGSizeMake(roundf(size.width / 2) * 2, roundf(size.height / 2) * 2); +} + +- (SCManagedVideoCapturerOutputSettings *)defaultRecordingOutputSettingsWithDeviceFormat: + (AVCaptureDeviceFormat *)deviceFormat +{ + SCTraceStart(); + CGFloat aspectRatio = SCManagedCapturedImageAndVideoAspectRatio(); + CGSize outputSize = [self defaultSizeForDeviceFormat:deviceFormat]; + outputSize = [self cropSize:outputSize toAspectRatio:aspectRatio]; + + // [TODO](Chao): remove the dependency of SCManagedVideoCapturer on SnapVideoMetaData + NSInteger videoBitRate = [SnapVideoMetadata averageTranscodingBitRate:outputSize + isRecording:YES + highQuality:YES + duration:0 + iFrameOnly:NO + originalVideoBitRate:0 + overlayImageFileSizeBits:0 + videoPlaybackRate:1 + isLagunaVideo:NO + hasOverlayToBlend:NO + sourceType:SCSnapVideoFilterSourceTypeUndefined]; + SCTraceSignal(@"Setup transcoding video bitrate"); + [_capturerLogger logStartingStep:kSCCapturerStartingStepTranscodeingVideoBitrate]; + + 
SCManagedVideoCapturerOutputSettings *outputSettings = + [[SCManagedVideoCapturerOutputSettings alloc] initWithWidth:outputSize.width + height:outputSize.height + videoBitRate:videoBitRate + audioBitRate:64000.0 + keyFrameInterval:15 + outputType:SCManagedVideoCapturerOutputTypeVideoSnap]; + + return outputSettings; +} + +- (SCQueuePerformer *)_getVideoPreparationPerformer +{ + SCAssert([_performer isCurrentPerformer], @"must run on _performer"); + if (!_videoPreparationPerformer) { + _videoPreparationPerformer = [[SCQueuePerformer alloc] initWithLabel:SCVideoRecordingPreparationQueueLabel + qualityOfService:QOS_CLASS_USER_INTERACTIVE + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCamera]; + } + return _videoPreparationPerformer; +} + +- (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration +{ + SCTraceStart(); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + self.status = SCManagedVideoCapturerStatusPrepareToRecord; + if (_audioConfiguration) { + [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil]; + } + __block NSError *audioSessionError = nil; + _preparedAudioConfiguration = _audioConfiguration = + [SCAudioSessionExperimentAdapter configureWith:configuration + performer:[self _getVideoPreparationPerformer] + completion:^(NSError *error) { + audioSessionError = error; + if (self.status == SCManagedVideoCapturerStatusPrepareToRecord) { + dispatch_semaphore_signal(_startRecordingSemaphore); + } + }]; + + // Wait until preparation for recording is done + dispatch_semaphore_wait(_startRecordingSemaphore, DISPATCH_TIME_FOREVER); + [_delegate managedVideoCapturer:self + didGetError:audioSessionError + forType:SCManagedVideoCapturerInfoAudioSessionError + session:self.activeSession]; + }]; +} + +- (SCVideoCaptureSessionInfo)startRecordingAsynchronouslyWithOutputSettings: + (SCManagedVideoCapturerOutputSettings *)outputSettings + 
audioConfiguration:(SCAudioConfiguration *)audioConfiguration + maxDuration:(NSTimeInterval)maxDuration + toURL:(NSURL *)URL + deviceFormat:(AVCaptureDeviceFormat *)deviceFormat + orientation:(AVCaptureVideoOrientation)videoOrientation + captureSessionID:(NSString *)captureSessionID +{ + SCTraceStart(); + _captureSessionID = [captureSessionID copy]; + [_capturerLogger prepareForStartingLog]; + + [[SCLogger sharedInstance] logTimedEventStart:kSCCameraMetricsAudioDelay + uniqueId:_captureSessionID + isUniqueEvent:NO]; + + NSTimeInterval startTime = CACurrentMediaTime(); + [[SCLogger sharedInstance] logPreCaptureOperationRequestedAt:startTime]; + [[SCCoreCameraLogger sharedInstance] logCameraCreationDelaySplitPointPreCaptureOperationRequested]; + _sessionId = arc4random(); + + // Set a invalid time so that we don't process videos when no frame available + _startSessionTime = kCMTimeInvalid; + _endSessionTime = kCMTimeInvalid; + _firstWrittenAudioBufferDelay = kCMTimeInvalid; + _audioQueueStarted = NO; + + SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo at start of recording: %@", + SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession)); + + SCVideoCaptureSessionInfo sessionInfo = self.activeSession; + [_performer performImmediatelyIfCurrentPerformer:^{ + _maxDuration = maxDuration; + dispatch_block_t startRecordingBlock = ^{ + _rawDataFrameNum = 0; + // Begin audio recording asynchronously, first, need to have correct audio session. + SCTraceStart(); + SCLogVideoCapturerInfo(@"Dequeue begin recording with audio session change delay: %lf seconds", + CACurrentMediaTime() - startTime); + if (self.status != SCManagedVideoCapturerStatusReadyForRecording) { + SCLogVideoCapturerInfo(@"SCManagedVideoCapturer status: %lu", (unsigned long)self.status); + // We may already released, but this should be OK. 
+ [SCAudioSessionExperimentAdapter relinquishConfiguration:_preparedAudioConfiguration + performer:nil + completion:nil]; + return; + } + if (_preparedAudioConfiguration != _audioConfiguration) { + SCLogVideoCapturerInfo( + @"SCManagedVideoCapturer has mismatched audio session token, prepared: %@, have: %@", + _preparedAudioConfiguration.token, _audioConfiguration.token); + // We are on a different audio session token already. + [SCAudioSessionExperimentAdapter relinquishConfiguration:_preparedAudioConfiguration + performer:nil + completion:nil]; + return; + } + + // Divide start recording workflow into different steps to log delay time. + // And checkpoint is the end of a step + [_capturerLogger logStartingStep:kSCCapturerStartingStepAudioSession]; + [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay + uniqueId:_captureSessionID + stepName:@"audio_session_start_end"]; + + SCLogVideoCapturerInfo(@"Prepare to begin recording"); + _lastError = nil; + + // initialize stopTime to a number much larger than the CACurrentMediaTime() which is the time from Jan 1, + // 2001 + _stopTime = kCFAbsoluteTimeIntervalSince1970; + + // Restart everything + _hasWritten = NO; + + SCManagedVideoCapturerOutputSettings *finalOutputSettings = + outputSettings ? 
outputSettings : [self defaultRecordingOutputSettingsWithDeviceFormat:deviceFormat]; + _isVideoSnap = finalOutputSettings.outputType == SCManagedVideoCapturerOutputTypeVideoSnap; + _outputSize = CGSizeMake(finalOutputSettings.height, finalOutputSettings.width); + [[SCLogger sharedInstance] logEvent:kSCCameraMetricsVideoRecordingStart + parameters:@{ + @"video_width" : @(finalOutputSettings.width), + @"video_height" : @(finalOutputSettings.height), + @"bit_rate" : @(finalOutputSettings.videoBitRate), + @"is_video_snap" : @(_isVideoSnap), + }]; + + _outputURL = [URL copy]; + _rawDataURL = [_outputURL URLByAppendingPathExtension:@"dat"]; + [_capturerLogger logStartingStep:kSCCapturerStartingStepOutputSettings]; + + // Make sure the raw frame data file is gone + SCTraceSignal(@"Setup video frame raw data"); + [[NSFileManager defaultManager] removeItemAtURL:_rawDataURL error:NULL]; + if ([SnapVideoMetadata deviceMeetsRequirementsForContentAdaptiveVideoEncoding]) { + if (!_videoFrameRawDataCollector) { + _videoFrameRawDataCollector = [[SCVideoFrameRawDataCollector alloc] initWithPerformer:_performer]; + } + [_videoFrameRawDataCollector prepareForCollectingVideoFrameRawDataWithRawDataURL:_rawDataURL]; + } + [_capturerLogger logStartingStep:kSCCapturerStartingStepVideoFrameRawData]; + + SCLogVideoCapturerInfo(@"Prepare to begin audio recording"); + + [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay + uniqueId:_captureSessionID + stepName:@"audio_queue_start_begin"]; + [self _beginAudioQueueRecordingWithCompleteHandler:^(NSError *error) { + [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay + uniqueId:_captureSessionID + stepName:@"audio_queue_start_end"]; + if (error) { + [_delegate managedVideoCapturer:self + didGetError:error + forType:SCManagedVideoCapturerInfoAudioQueueError + session:sessionInfo]; + } else { + _audioQueueStarted = YES; + } + if (self.status == SCManagedVideoCapturerStatusRecording) { + [_delegate 
managedVideoCapturer:self didBeginAudioRecording:sessionInfo]; + } + }]; + + // Call this delegate first so that we have proper state transition from begin recording to finish / error + [_delegate managedVideoCapturer:self didBeginVideoRecording:sessionInfo]; + + // We need to start with a fresh recording file, make sure it's gone + [[NSFileManager defaultManager] removeItemAtURL:_outputURL error:NULL]; + [_capturerLogger logStartingStep:kSCCapturerStartingStepAudioRecording]; + + SCTraceSignal(@"Setup asset writer"); + + NSError *error = nil; + _videoWriter = [[SCCapturerBufferedVideoWriter alloc] initWithPerformer:_performer + outputURL:self.outputURL + delegate:self + error:&error]; + if (error) { + self.status = SCManagedVideoCapturerStatusError; + _lastError = error; + _placeholderImage = nil; + [_delegate managedVideoCapturer:self + didGetError:error + forType:SCManagedVideoCapturerInfoAssetWriterError + session:sessionInfo]; + [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo]; + return; + } + + [_capturerLogger logStartingStep:kSCCapturerStartingStepAssetWriterConfiguration]; + if (![_videoWriter prepareWritingWithOutputSettings:finalOutputSettings]) { + _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain + code:kSCManagedVideoCapturerCannotAddAudioVideoInput + userInfo:nil]; + _placeholderImage = nil; + [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo]; + return; + } + SCTraceSignal(@"Observe asset writer status change"); + SCCAssert(_placeholderImage == nil, @"placeholderImage should be nil"); + self.status = SCManagedVideoCapturerStatusRecording; + // Only log the recording delay event from camera view (excluding video note recording) + if (_isVideoSnap) { + [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraMetricsRecordingDelay + uniqueId:@"VIDEO" + parameters:@{ + @"type" : @"video" + }]; + } + _recordStartTime = CACurrentMediaTime(); + }; + + [[SCLogger 
sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay + uniqueId:_captureSessionID + stepName:@"audio_session_start_begin"]; + + if (self.status == SCManagedVideoCapturerStatusPrepareToRecord) { + self.status = SCManagedVideoCapturerStatusReadyForRecording; + startRecordingBlock(); + } else { + self.status = SCManagedVideoCapturerStatusReadyForRecording; + if (_audioConfiguration) { + [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration + performer:nil + completion:nil]; + } + _preparedAudioConfiguration = _audioConfiguration = [SCAudioSessionExperimentAdapter + configureWith:audioConfiguration + performer:_performer + completion:^(NSError *error) { + if (error) { + [_delegate managedVideoCapturer:self + didGetError:error + forType:SCManagedVideoCapturerInfoAudioSessionError + session:sessionInfo]; + } + startRecordingBlock(); + }]; + } + }]; + return sessionInfo; +} + +- (NSError *)_handleRetryBeginAudioRecordingErrorCode:(NSInteger)errorCode + error:(NSError *)error + micResult:(NSDictionary *)resultInfo +{ + SCTraceStart(); + NSString *resultStr = SC_CAST_TO_CLASS_OR_NIL(resultInfo[SCAudioSessionRetryDataSourceInfoKey], NSString); + BOOL changeMicSuccess = [resultInfo[SCAudioSessionRetryDataSourceResultKey] boolValue]; + if (!error) { + SCManagedVideoCapturerInfoType type = SCManagedVideoCapturerInfoAudioQueueRetrySuccess; + if (changeMicSuccess) { + if (errorCode == kSCAudioQueueErrorWildCard) { + type = SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_audioQueue; + } else if (errorCode == kSCAudioQueueErrorHardware) { + type = SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_hardware; + } + } + [_delegate managedVideoCapturer:self didGetError:nil forType:type session:self.activeSession]; + } else { + error = [self _appendInfo:resultStr forInfoKey:@"retry_datasource_result" toError:error]; + SCLogVideoCapturerError(@"Retry setting audio session failed with error:%@", error); + } + return error; +} + +- 
(BOOL)_isBottomMicBrokenCode:(NSInteger)errorCode +{ + // we consider both -50 and 1852797029 as a broken microphone case + return (errorCode == kSCAudioQueueErrorWildCard || errorCode == kSCAudioQueueErrorHardware); +} + +- (void)_beginAudioQueueRecordingWithCompleteHandler:(audio_capture_session_block)block +{ + SCTraceStart(); + SCAssert(block, @"block can not be nil"); + @weakify(self); + void (^beginAudioBlock)(NSError *error) = ^(NSError *error) { + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + + SCTraceStart(); + NSInteger errorCode = error.code; + if ([self _isBottomMicBrokenCode:errorCode] && + (self.status == SCManagedVideoCapturerStatusReadyForRecording || + self.status == SCManagedVideoCapturerStatusRecording)) { + + SCLogVideoCapturerError(@"Start to retry begin audio queue (error code: %@)", @(errorCode)); + + // use front microphone to retry + NSDictionary *resultInfo = [[SCAudioSession sharedInstance] tryUseFrontMicWithErrorCode:errorCode]; + [self _retryRequestRecordingWithCompleteHandler:^(NSError *error) { + // then retry audio queue again + [_audioCaptureSession + beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate + completionHandler:^(NSError *innerError) { + NSError *modifyError = [self + _handleRetryBeginAudioRecordingErrorCode:errorCode + error:innerError + micResult:resultInfo]; + block(modifyError); + }]; + }]; + + } else { + block(error); + } + }]; + }; + [_audioCaptureSession beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate + completionHandler:^(NSError *error) { + beginAudioBlock(error); + }]; +} + +// This method must not change nullability of error, it should only either append info into userInfo, +// or return the NSError as it is. 
+- (NSError *)_appendInfo:(NSString *)infoStr forInfoKey:(NSString *)infoKey toError:(NSError *)error +{ + if (!error || infoStr.length == 0 || infoKey.length == 0 || error.domain.length == 0) { + return error; + } + NSMutableDictionary *errorInfo = [[error userInfo] mutableCopy]; + errorInfo[infoKey] = infoStr.length > 0 ? infoStr : @"(null)"; + + return [NSError errorWithDomain:error.domain code:error.code userInfo:errorInfo]; +} + +- (void)_retryRequestRecordingWithCompleteHandler:(audio_capture_session_block)block +{ + SCTraceStart(); + if (_audioConfiguration) { + [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil]; + } + SCVideoCaptureSessionInfo sessionInfo = self.activeSession; + _preparedAudioConfiguration = _audioConfiguration = [SCAudioSessionExperimentAdapter + configureWith:_audioConfiguration.configuration + performer:_performer + completion:^(NSError *error) { + if (error) { + [_delegate managedVideoCapturer:self + didGetError:error + forType:SCManagedVideoCapturerInfoAudioSessionError + session:sessionInfo]; + } + if (block) { + block(error); + } + }]; +} + +#pragma SCCapturerBufferedVideoWriterDelegate + +- (void)videoWriterDidFailWritingWithError:(NSError *)error +{ + // If it failed, we call the delegate method, release everything else we + // have, well, on the output queue obviously + SCTraceStart(); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + SCVideoCaptureSessionInfo sessionInfo = self.activeSession; + [_outputURL reloadAssetKeys]; + [self _cleanup]; + [self _disposeAudioRecording]; + self.status = SCManagedVideoCapturerStatusError; + _lastError = error; + _placeholderImage = nil; + [_delegate managedVideoCapturer:self + didGetError:error + forType:SCManagedVideoCapturerInfoAssetWriterError + session:sessionInfo]; + [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo]; + }]; +} + +- (void)_willStopRecording +{ + if 
(self.status == SCManagedVideoCapturerStatusRecording) { + // To notify UI continue the preview processing + SCQueuePerformer *promisePerformer = + [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoCapturerPromiseQueueLabel + qualityOfService:QOS_CLASS_USER_INTERACTIVE + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCamera]; + _recordedVideoPromise = [[SCPromise alloc] initWithPerformer:promisePerformer]; + [_delegate managedVideoCapturer:self + willStopWithRecordedVideoFuture:_recordedVideoPromise.future + videoSize:_outputSize + placeholderImage:_placeholderImage + session:self.activeSession]; + } +} + +- (void)_stopRecording +{ + SCTraceStart(); + SCAssert([_performer isCurrentPerformer], @"Needs to be on the performing queue"); + // Reset stop session as well as stop time. + ++_stopSession; + _stopTime = kCFAbsoluteTimeIntervalSince1970; + SCPromise> *recordedVideoPromise = _recordedVideoPromise; + _recordedVideoPromise = nil; + sc_managed_capturer_recording_session_t sessionId = _sessionId; + if (self.status == SCManagedVideoCapturerStatusRecording) { + self.status = SCManagedVideoCapturerStatusIdle; + if (CMTIME_IS_VALID(_endSessionTime)) { + [_videoWriter + finishWritingAtSourceTime:_endSessionTime + withCompletionHanlder:^{ + // actually, make sure everything happens on outputQueue + [_performer performImmediatelyIfCurrentPerformer:^{ + if (sessionId != _sessionId) { + SCLogVideoCapturerError(@"SessionId mismatch: before: %@, after: %@", @(sessionId), + @(_sessionId)); + return; + } + [self _disposeAudioRecording]; + // Log the video snap recording success event w/ parameters, not including video + // note + if (_isVideoSnap) { + [SnapVideoMetadata logVideoEvent:kSCCameraMetricsVideoRecordingSuccess + videoSettings:_videoOutputSettings + isSave:NO]; + } + void (^stopRecordingCompletionBlock)(NSURL *) = ^(NSURL *rawDataURL) { + SCAssert([_performer isCurrentPerformer], @"Needs to be on the performing queue"); + 
SCVideoCaptureSessionInfo sessionInfo = self.activeSession; + + [self _cleanup]; + + [[SCLogger sharedInstance] logTimedEventStart:@"SNAP_VIDEO_SIZE_LOADING" + uniqueId:@"" + isUniqueEvent:NO]; + CGSize videoSize = + [SnapVideoMetadata videoSizeForURL:_outputURL waitWhileLoadingTracksIfNeeded:YES]; + [[SCLogger sharedInstance] logTimedEventEnd:@"SNAP_VIDEO_SIZE_LOADING" + uniqueId:@"" + parameters:nil]; + // Log error if video file is not really ready + if (videoSize.width == 0.0 || videoSize.height == 0.0) { + _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain + code:kSCManagedVideoCapturerZeroVideoSize + userInfo:nil]; + [recordedVideoPromise completeWithError:_lastError]; + [_delegate managedVideoCapturer:self + didFailWithError:_lastError + session:sessionInfo]; + _placeholderImage = nil; + return; + } + // If the video duration is too short, the future object will complete + // with error as well + SCManagedRecordedVideo *recordedVideo = + [[SCManagedRecordedVideo alloc] initWithVideoURL:_outputURL + rawVideoDataFileURL:_rawDataURL + placeholderImage:_placeholderImage + isFrontFacingCamera:_isFrontFacingCamera]; + [recordedVideoPromise completeWithValue:recordedVideo]; + [_delegate managedVideoCapturer:self + didSucceedWithRecordedVideo:recordedVideo + session:sessionInfo]; + _placeholderImage = nil; + }; + + if (_videoFrameRawDataCollector) { + [_videoFrameRawDataCollector + drainFrameDataCollectionWithCompletionHandler:^(NSURL *rawDataURL) { + stopRecordingCompletionBlock(rawDataURL); + }]; + } else { + stopRecordingCompletionBlock(nil); + } + }]; + }]; + + } else { + [self _disposeAudioRecording]; + SCVideoCaptureSessionInfo sessionInfo = self.activeSession; + [self _cleanup]; + self.status = SCManagedVideoCapturerStatusError; + _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain + code:kSCManagedVideoCapturerEmptyFrame + userInfo:nil]; + _placeholderImage = nil; + [recordedVideoPromise 
completeWithError:_lastError]; + [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo]; + } + } else { + if (self.status == SCManagedVideoCapturerStatusPrepareToRecord || + self.status == SCManagedVideoCapturerStatusReadyForRecording) { + _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain + code:kSCManagedVideoCapturerStopBeforeStart + userInfo:nil]; + } else { + _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain + code:kSCManagedVideoCapturerStopWithoutStart + userInfo:nil]; + } + SCVideoCaptureSessionInfo sessionInfo = self.activeSession; + [self _cleanup]; + _placeholderImage = nil; + if (_audioConfiguration) { + [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil]; + _audioConfiguration = nil; + } + [recordedVideoPromise completeWithError:_lastError]; + [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo]; + self.status = SCManagedVideoCapturerStatusIdle; + [_capturerLogger logEventIfStartingTooSlow]; + } +} + +- (void)stopRecordingAsynchronously +{ + SCTraceStart(); + NSTimeInterval stopTime = CACurrentMediaTime(); + [_performer performImmediatelyIfCurrentPerformer:^{ + _stopTime = stopTime; + NSInteger stopSession = _stopSession; + [self _willStopRecording]; + [_performer perform:^{ + // If we haven't stopped yet, call the stop now nevertheless. + if (stopSession == _stopSession) { + [self _stopRecording]; + } + } + after:kSCManagedVideoCapturerStopRecordingDeadline]; + }]; +} + +- (void)cancelRecordingAsynchronously +{ + SCTraceStart(); + [_performer performImmediatelyIfCurrentPerformer:^{ + SCTraceStart(); + SCLogVideoCapturerInfo(@"Cancel recording. 
status: %lu", (unsigned long)self.status); + if (self.status == SCManagedVideoCapturerStatusRecording) { + self.status = SCManagedVideoCapturerStatusIdle; + [self _disposeAudioRecording]; + [_videoWriter cancelWriting]; + SCVideoCaptureSessionInfo sessionInfo = self.activeSession; + [self _cleanup]; + _placeholderImage = nil; + [_delegate managedVideoCapturer:self didCancelVideoRecording:sessionInfo]; + } else if ((self.status == SCManagedVideoCapturerStatusPrepareToRecord) || + (self.status == SCManagedVideoCapturerStatusReadyForRecording)) { + SCVideoCaptureSessionInfo sessionInfo = self.activeSession; + [self _cleanup]; + self.status = SCManagedVideoCapturerStatusIdle; + _placeholderImage = nil; + if (_audioConfiguration) { + [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration + performer:nil + completion:nil]; + _audioConfiguration = nil; + } + [_delegate managedVideoCapturer:self didCancelVideoRecording:sessionInfo]; + } + [_capturerLogger logEventIfStartingTooSlow]; + }]; +} + +- (void)addTimedTask:(SCTimedTask *)task +{ + [_performer performImmediatelyIfCurrentPerformer:^{ + // Only allow to add observers when we are not recording. 
+ if (!self->_timeObserver) { + self->_timeObserver = [SCManagedVideoCapturerTimeObserver new]; + } + [self->_timeObserver addTimedTask:task]; + SCLogVideoCapturerInfo(@"Added timetask: %@", task); + }]; +} + +- (void)clearTimedTasks +{ + // _timeObserver will be initialized lazily when adding timed tasks + SCLogVideoCapturerInfo(@"Clearing time observer"); + [_performer performImmediatelyIfCurrentPerformer:^{ + if (self->_timeObserver) { + self->_timeObserver = nil; + } + }]; +} + +- (void)_cleanup +{ + [_videoWriter cleanUp]; + _timeObserver = nil; + + SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo before cleanup: %@", + SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession)); + + _startSessionTime = kCMTimeInvalid; + _endSessionTime = kCMTimeInvalid; + _firstWrittenAudioBufferDelay = kCMTimeInvalid; + _sessionId = 0; + _captureSessionID = nil; + _audioQueueStarted = NO; +} + +- (void)_disposeAudioRecording +{ + SCLogVideoCapturerInfo(@"Disposing audio recording"); + SCAssert([_performer isCurrentPerformer], @""); + // Setup the audio session token correctly + SCAudioConfigurationToken *audioConfiguration = _audioConfiguration; + [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay + uniqueId:_captureSessionID + stepName:@"audio_queue_stop_begin"]; + NSString *captureSessionID = _captureSessionID; + [_audioCaptureSession disposeAudioRecordingSynchronouslyWithCompletionHandler:^{ + [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay + uniqueId:captureSessionID + stepName:@"audio_queue_stop_end"]; + SCLogVideoCapturerInfo(@"Did dispose audio recording"); + if (audioConfiguration) { + [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay + uniqueId:captureSessionID + stepName:@"audio_session_stop_begin"]; + [SCAudioSessionExperimentAdapter + relinquishConfiguration:audioConfiguration + performer:_performer + completion:^(NSError *_Nullable error) { + [[SCLogger sharedInstance] 
logStepToEvent:kSCCameraMetricsAudioDelay + uniqueId:captureSessionID + stepName:@"audio_session_stop_end"]; + [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraMetricsAudioDelay + uniqueId:captureSessionID + parameters:nil]; + }]; + } + }]; + _audioConfiguration = nil; +} + +- (CIContext *)ciContext +{ + if (!_ciContext) { + _ciContext = [CIContext contextWithOptions:nil]; + } + return _ciContext; +} + +#pragma mark - SCAudioCaptureSessionDelegate + +- (void)audioCaptureSession:(SCAudioCaptureSession *)audioCaptureSession + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + SCTraceStart(); + if (self.status != SCManagedVideoCapturerStatusRecording) { + return; + } + CFRetain(sampleBuffer); + [_performer performImmediatelyIfCurrentPerformer:^{ + if (self.status == SCManagedVideoCapturerStatusRecording) { + // Audio always follows video, there is no other way around this :) + if (_hasWritten && CACurrentMediaTime() - _recordStartTime <= _maxDuration) { + [self _processAudioSampleBuffer:sampleBuffer]; + [_videoWriter appendAudioSampleBuffer:sampleBuffer]; + } + } + CFRelease(sampleBuffer); + }]; +} + +#pragma mark - SCManagedVideoDataSourceListener + +- (void)managedVideoDataSource:(id)managedVideoDataSource + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + SCTraceStart(); + if (self.status != SCManagedVideoCapturerStatusRecording) { + return; + } + CFRetain(sampleBuffer); + [_performer performImmediatelyIfCurrentPerformer:^{ + // the following check will allow the capture pipeline to drain + if (CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) > _stopTime) { + [self _stopRecording]; + } else { + if (self.status == SCManagedVideoCapturerStatusRecording) { + _isFrontFacingCamera = (devicePosition == SCManagedCaptureDevicePositionFront); + CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); + if (CMTIME_IS_VALID(presentationTime)) { + 
SCLogVideoCapturerInfo(@"Obtained video data source at time %lld", presentationTime.value); + } else { + SCLogVideoCapturerInfo(@"Obtained video data source with an invalid time"); + } + if (!_hasWritten) { + // Start writing! + [_videoWriter startWritingAtSourceTime:presentationTime]; + [_capturerLogger endLoggingForStarting]; + _startSessionTime = presentationTime; + _startSessionRealTime = CACurrentMediaTime(); + SCLogVideoCapturerInfo(@"First frame processed %f seconds after presentation Time", + _startSessionRealTime - CMTimeGetSeconds(presentationTime)); + _hasWritten = YES; + [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CMTimeGetSeconds(presentationTime)]; + [[SCCoreCameraLogger sharedInstance] + logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CMTimeGetSeconds( + presentationTime)]; + SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo after first frame: %@", + SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession)); + } + // Only respect video end session time, audio can be cut off, not video, + // not video + if (CMTIME_IS_INVALID(_endSessionTime)) { + _endSessionTime = presentationTime; + } else { + _endSessionTime = CMTimeMaximum(_endSessionTime, presentationTime); + } + if (CACurrentMediaTime() - _recordStartTime <= _maxDuration) { + [_videoWriter appendVideoSampleBuffer:sampleBuffer]; + [self _processVideoSampleBuffer:sampleBuffer]; + } + if (_timeObserver) { + [_timeObserver processTime:CMTimeSubtract(presentationTime, _startSessionTime) + sessionStartTimeDelayInSecond:_startSessionRealTime - CMTimeGetSeconds(_startSessionTime)]; + } + } + } + CFRelease(sampleBuffer); + }]; +} + +- (void)_generatePlaceholderImageWithPixelBuffer:(CVImageBufferRef)pixelBuffer metaData:(NSDictionary *)metadata +{ + SCTraceStart(); + CVImageBufferRef imageBuffer = CVPixelBufferRetain(pixelBuffer); + if (imageBuffer) { + dispatch_async(SCPlaceholderImageGenerationQueue(), ^{ + UIImage *placeholderImage = [UIImage 
imageWithPixelBufferRef:imageBuffer + backingType:UIImageBackingTypeCGImage + orientation:UIImageOrientationRight + context:[self ciContext]]; + placeholderImage = + SCCropImageToTargetAspectRatio(placeholderImage, SCManagedCapturedImageAndVideoAspectRatio()); + [_performer performImmediatelyIfCurrentPerformer:^{ + // After processing, assign it back. + if (self.status == SCManagedVideoCapturerStatusRecording) { + _placeholderImage = placeholderImage; + // Check video frame health by placeholder image + [[SCManagedFrameHealthChecker sharedInstance] + checkVideoHealthForCaptureFrameImage:placeholderImage + metedata:metadata + captureSessionID:_captureSessionID]; + } + CVPixelBufferRelease(imageBuffer); + }]; + }); + } +} + +#pragma mark - Pixel Buffer methods + +- (void)_processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + SC_GUARD_ELSE_RETURN(sampleBuffer); + CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); + BOOL shouldGeneratePlaceholderImage = CMTimeCompare(presentationTime, _startSessionTime) == 0; + + CVImageBufferRef outputPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); + if (outputPixelBuffer) { + [self _addVideoRawDataWithPixelBuffer:outputPixelBuffer]; + if (shouldGeneratePlaceholderImage) { + NSDictionary *extraInfo = [_delegate managedVideoCapturerGetExtraFrameHealthInfo:self]; + NSDictionary *metadata = + [[[SCManagedFrameHealthChecker sharedInstance] metadataForSampleBuffer:sampleBuffer extraInfo:extraInfo] + copy]; + [self _generatePlaceholderImageWithPixelBuffer:outputPixelBuffer metaData:metadata]; + } + } + + [_delegate managedVideoCapturer:self + didAppendVideoSampleBuffer:sampleBuffer + presentationTimestamp:CMTimeSubtract(presentationTime, _startSessionTime)]; +} + +- (void)_processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + [_announcer managedAudioDataSource:self didOutputSampleBuffer:sampleBuffer]; + if (!CMTIME_IS_VALID(self.firstWrittenAudioBufferDelay)) { + 
self.firstWrittenAudioBufferDelay = + CMTimeSubtract(CMSampleBufferGetPresentationTimeStamp(sampleBuffer), _startSessionTime); + } +} + +- (void)_addVideoRawDataWithPixelBuffer:(CVImageBufferRef)pixelBuffer +{ + if (_videoFrameRawDataCollector && [SnapVideoMetadata deviceMeetsRequirementsForContentAdaptiveVideoEncoding] && + ((_rawDataFrameNum % kSCVideoContentComplexitySamplingRate) == 0) && (_rawDataFrameNum > 0)) { + if (_videoFrameRawDataCollector) { + CVImageBufferRef imageBuffer = CVPixelBufferRetain(pixelBuffer); + [_videoFrameRawDataCollector collectVideoFrameRawDataWithImageBuffer:imageBuffer + frameNum:_rawDataFrameNum + completion:^{ + CVPixelBufferRelease(imageBuffer); + }]; + } + } + _rawDataFrameNum++; +} + +#pragma mark - SCManagedAudioDataSource + +- (void)addListener:(id)listener +{ + [_announcer addListener:listener]; +} + +- (void)removeListener:(id)listener +{ + [_announcer removeListener:listener]; +} + +- (void)startStreamingWithAudioConfiguration:(SCAudioConfiguration *)configuration +{ + SCAssertFail(@"Controlled by recorder"); +} + +- (void)stopStreaming +{ + SCAssertFail(@"Controlled by recorder"); +} + +- (BOOL)isStreaming +{ + return self.status == SCManagedVideoCapturerStatusRecording; +} + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturerHandler.h b/ManagedCapturer/SCManagedVideoCapturerHandler.h new file mode 100644 index 0000000..1c55cea --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerHandler.h @@ -0,0 +1,20 @@ +// +// SCManagedVideoCapturerHandler.h +// Snapchat +// +// Created by Jingtian Yang on 11/12/2017. 
+// + +#import "SCManagedVideoCapturer.h" + +#import + +@class SCCaptureResource; + +@interface SCManagedVideoCapturerHandler : NSObject + +- (instancetype)init NS_UNAVAILABLE; + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource; + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturerHandler.m b/ManagedCapturer/SCManagedVideoCapturerHandler.m new file mode 100644 index 0000000..7c4866e --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerHandler.m @@ -0,0 +1,252 @@ +// +// SCManagedVideoCapturerHandler.m +// Snapchat +// +// Created by Jingtian Yang on 11/12/2017. +// + +#import "SCManagedVideoCapturerHandler.h" + +#import "SCCaptureResource.h" +#import "SCManagedCaptureDevice+SCManagedCapturer.h" +#import "SCManagedCapturer.h" +#import "SCManagedCapturerLensAPI.h" +#import "SCManagedCapturerLogging.h" +#import "SCManagedCapturerSampleMetadata.h" +#import "SCManagedCapturerState.h" +#import "SCManagedDeviceCapacityAnalyzer.h" +#import "SCManagedFrontFlashController.h" +#import "SCManagedVideoFileStreamer.h" +#import "SCManagedVideoFrameSampler.h" +#import "SCManagedVideoStreamer.h" + +#import +#import +#import +#import +#import + +@interface SCManagedVideoCapturerHandler () { + __weak SCCaptureResource *_captureResource; +} +@end + +@implementation SCManagedVideoCapturerHandler + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource +{ + self = [super init]; + if (self) { + SCAssert(captureResource, @""); + _captureResource = captureResource; + } + return self; +} + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didBeginVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Did begin video recording. 
sessionId:%u", sessionInfo.sessionId); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didBeginVideoRecording:state + session:sessionInfo]; + }); + }]; +} + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didBeginAudioRecording:(SCVideoCaptureSessionInfo)sessionInfo +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Did begin audio recording. sessionId:%u", sessionInfo.sessionId); + [_captureResource.queuePerformer perform:^{ + if ([_captureResource.fileInputDecider shouldProcessFileInput]) { + [_captureResource.videoDataSource startStreaming]; + } + SCTraceStart(); + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didBeginAudioRecording:state + session:sessionInfo]; + }); + }]; +} + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + willStopWithRecordedVideoFuture:(SCFuture> *)recordedVideoFuture + videoSize:(CGSize)videoSize + placeholderImage:(UIImage *)placeholderImage + session:(SCVideoCaptureSessionInfo)sessionInfo +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Will stop recording. 
sessionId:%u placeHolderImage:%@ videoSize:(%f, %f)", + sessionInfo.sessionId, placeholderImage, videoSize.width, videoSize.height); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + if (_captureResource.videoRecording) { + SCManagedCapturerState *state = [_captureResource.state copy]; + // Then, sync back to main thread to notify will finish recording + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + willFinishRecording:state + session:sessionInfo + recordedVideoFuture:recordedVideoFuture + videoSize:videoSize + placeholderImage:placeholderImage]; + }); + } + }]; +} + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didSucceedWithRecordedVideo:(SCManagedRecordedVideo *)recordedVideo + session:(SCVideoCaptureSessionInfo)sessionInfo +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Did succeed recording. sessionId:%u recordedVideo:%@", sessionInfo.sessionId, recordedVideo); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + if (_captureResource.videoRecording) { + [self _videoRecordingCleanup]; + SCManagedCapturerState *state = [_captureResource.state copy]; + // Then, sync back to main thread to notify the finish recording + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didFinishRecording:state + session:sessionInfo + recordedVideo:recordedVideo]; + }); + } + }]; +} + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didFailWithError:(NSError *)error + session:(SCVideoCaptureSessionInfo)sessionInfo +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Did fail recording. 
sessionId:%u", sessionInfo.sessionId); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + if (_captureResource.videoRecording) { + [self _videoRecordingCleanup]; + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didFailRecording:state + session:sessionInfo + error:error]; + }); + } + }]; +} + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didCancelVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Did cancel recording. sessionId:%u", sessionInfo.sessionId); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + if (_captureResource.videoRecording) { + [self _videoRecordingCleanup]; + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didCancelRecording:state + session:sessionInfo]; + }); + } + }]; +} + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didGetError:(NSError *)error + forType:(SCManagedVideoCapturerInfoType)type + session:(SCVideoCaptureSessionInfo)sessionInfo +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Did get error. 
sessionId:%u errorType:%lu, error:%@", sessionInfo.sessionId, (long)type, error); + [_captureResource.queuePerformer perform:^{ + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didGetError:error + forType:type + session:sessionInfo]; + }); + }]; +} + +- (NSDictionary *)managedVideoCapturerGetExtraFrameHealthInfo:(SCManagedVideoCapturer *)managedVideoCapturer +{ + SCTraceODPCompatibleStart(2); + if (_captureResource.state.lensesActive) { + return @{ + @"lens_active" : @(YES), + @"lens_id" : ([_captureResource.lensProcessingCore activeLensId] ?: [NSNull null]) + }; + } + return nil; +} + +- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer + didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer + presentationTimestamp:(CMTime)presentationTimestamp +{ + CFRetain(sampleBuffer); + [_captureResource.queuePerformer perform:^{ + SCManagedCapturerSampleMetadata *sampleMetadata = + [[SCManagedCapturerSampleMetadata alloc] initWithPresentationTimestamp:presentationTimestamp + fieldOfView:_captureResource.device.fieldOfView]; + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didAppendVideoSampleBuffer:sampleBuffer + sampleMetadata:sampleMetadata]; + CFRelease(sampleBuffer); + }]; +} + +- (void)_videoRecordingCleanup +{ + SCTraceODPCompatibleStart(2); + SCAssert(_captureResource.videoRecording, @"clean up function only can be called if the " + @"video recording is still in progress."); + SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); + SCLogCapturerInfo(@"Video recording cleanup. 
previous state:%@", _captureResource.state); + [_captureResource.videoDataSource removeListener:_captureResource.videoCapturer]; + if (_captureResource.videoFrameSampler) { + SCManagedVideoFrameSampler *sampler = _captureResource.videoFrameSampler; + _captureResource.videoFrameSampler = nil; + [_captureResource.announcer removeListener:sampler]; + } + // Add back other listeners to video streamer + [_captureResource.videoDataSource addListener:_captureResource.deviceCapacityAnalyzer]; + if (!_captureResource.state.torchActive) { + // We should turn off torch for the device that we specifically turned on + // for recording + [_captureResource.device setTorchActive:NO]; + if (_captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) { + _captureResource.frontFlashController.torchActive = NO; + } + } + + // Unlock focus on both front and back camera if they were locked. + // Even if ARKit was being used during recording, it'll be shut down by the time we get here + // So DON'T match the ARKit check we use around [_ setRecording:YES] + SCManagedCaptureDevice *front = [SCManagedCaptureDevice front]; + SCManagedCaptureDevice *back = [SCManagedCaptureDevice back]; + [front setRecording:NO]; + [back setRecording:NO]; + _captureResource.videoRecording = NO; + if (_captureResource.state.lensesActive) { + BOOL modifySource = _captureResource.videoRecording || _captureResource.state.liveVideoStreaming; + [_captureResource.lensProcessingCore setModifySource:modifySource]; + } +} + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturerLogger.h b/ManagedCapturer/SCManagedVideoCapturerLogger.h new file mode 100644 index 0000000..f37b24e --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerLogger.h @@ -0,0 +1,27 @@ +// +// SCManagedVideoCapturerLogger.h +// Snapchat +// +// Created by Pinlin on 12/04/2017. +// Copyright © 2017 Snapchat, Inc. All rights reserved. 
+// + +#import + +static NSString *const kSCCapturerStartingStepAudioSession = @"audio_session"; +static NSString *const kSCCapturerStartingStepTranscodeingVideoBitrate = @"transcoding_video_bitrate"; +static NSString *const kSCCapturerStartingStepOutputSettings = @"output_settings"; +static NSString *const kSCCapturerStartingStepVideoFrameRawData = @"video_frame_raw_data"; +static NSString *const kSCCapturerStartingStepAudioRecording = @"audio_recording"; +static NSString *const kSCCapturerStartingStepAssetWriterConfiguration = @"asset_writer_config"; +static NSString *const kSCCapturerStartingStepStartingWriting = @"start_writing"; +static NSString *const kCapturerStartingTotalDelay = @"total_delay"; + +@interface SCManagedVideoCapturerLogger : NSObject + +- (void)prepareForStartingLog; +- (void)logStartingStep:(NSString *)stepName; +- (void)endLoggingForStarting; +- (void)logEventIfStartingTooSlow; + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturerLogger.m b/ManagedCapturer/SCManagedVideoCapturerLogger.m new file mode 100644 index 0000000..2e5ad96 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerLogger.m @@ -0,0 +1,77 @@ +// +// SCManagedVideoCapturerLogger.m +// Snapchat +// +// Created by Pinlin on 12/04/2017. +// Copyright © 2017 Snapchat, Inc. All rights reserved. 
+// + +#import "SCManagedVideoCapturerLogger.h" + +#import +#import +#import +#import + +@import QuartzCore; + +@interface SCManagedVideoCapturerLogger () { + // For time profiles metric during start recording + NSMutableDictionary *_startingStepsDelayTime; + NSTimeInterval _beginStartTime; + NSTimeInterval _lastCheckpointTime; + NSTimeInterval _startedTime; +} + +@end + +@implementation SCManagedVideoCapturerLogger + +- (instancetype)init +{ + self = [super init]; + if (self) { + _startingStepsDelayTime = [NSMutableDictionary dictionary]; + } + return self; +} + +- (void)prepareForStartingLog +{ + _beginStartTime = CACurrentMediaTime(); + _lastCheckpointTime = _beginStartTime; + [_startingStepsDelayTime removeAllObjects]; +} + +- (void)logStartingStep:(NSString *)stepname +{ + SCAssert(_beginStartTime > 0, @"logger is not ready yet, please call prepareForStartingLog at first"); + NSTimeInterval currentCheckpointTime = CACurrentMediaTime(); + _startingStepsDelayTime[stepname] = @(currentCheckpointTime - _lastCheckpointTime); + _lastCheckpointTime = currentCheckpointTime; +} + +- (void)endLoggingForStarting +{ + SCAssert(_beginStartTime > 0, @"logger is not ready yet, please call prepareForStartingLog at first"); + _startedTime = CACurrentMediaTime(); + [self logStartingStep:kSCCapturerStartingStepStartingWriting]; + _startingStepsDelayTime[kCapturerStartingTotalDelay] = @(CACurrentMediaTime() - _beginStartTime); +} + +- (void)logEventIfStartingTooSlow +{ + if (_beginStartTime > 0) { + if (_startingStepsDelayTime.count == 0) { + // It should not be here. We only need to log once. 
+ return; + } + SCLogGeneralWarning(@"Capturer starting delay(in second):%f", _startedTime - _beginStartTime); + [[SCLogger sharedInstance] logEvent:kSCCameraMetricsVideoCapturerStartDelay parameters:_startingStepsDelayTime]; + // Clean all delay times after logging + [_startingStepsDelayTime removeAllObjects]; + _beginStartTime = 0; + } +} + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturerOutputSettings.h b/ManagedCapturer/SCManagedVideoCapturerOutputSettings.h new file mode 100644 index 0000000..693894e --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerOutputSettings.h @@ -0,0 +1,48 @@ +// 42f6113daff3eebf06d809a073c99651867c42ea +// Generated by the value-object.rb DO NOT EDIT!! + +#import "SCManagedVideoCapturerOutputType.h" + +#import + +#import +#import + +@protocol SCManagedVideoCapturerOutputSettings + +@property (nonatomic, assign, readonly) CGFloat width; + +@property (nonatomic, assign, readonly) CGFloat height; + +@property (nonatomic, assign, readonly) CGFloat videoBitRate; + +@property (nonatomic, assign, readonly) CGFloat audioBitRate; + +@property (nonatomic, assign, readonly) NSUInteger keyFrameInterval; + +@property (nonatomic, assign, readonly) SCManagedVideoCapturerOutputType outputType; + +@end + +@interface SCManagedVideoCapturerOutputSettings : NSObject + +@property (nonatomic, assign, readonly) CGFloat width; + +@property (nonatomic, assign, readonly) CGFloat height; + +@property (nonatomic, assign, readonly) CGFloat videoBitRate; + +@property (nonatomic, assign, readonly) CGFloat audioBitRate; + +@property (nonatomic, assign, readonly) NSUInteger keyFrameInterval; + +@property (nonatomic, assign, readonly) SCManagedVideoCapturerOutputType outputType; + +- (instancetype)initWithWidth:(CGFloat)width + height:(CGFloat)height + videoBitRate:(CGFloat)videoBitRate + audioBitRate:(CGFloat)audioBitRate + keyFrameInterval:(NSUInteger)keyFrameInterval + outputType:(SCManagedVideoCapturerOutputType)outputType; + +@end diff --git 
a/ManagedCapturer/SCManagedVideoCapturerOutputSettings.m b/ManagedCapturer/SCManagedVideoCapturerOutputSettings.m new file mode 100644 index 0000000..275e33d --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerOutputSettings.m @@ -0,0 +1,221 @@ +// 42f6113daff3eebf06d809a073c99651867c42ea +// Generated by the value-object.rb DO NOT EDIT!! + +#import "SCManagedVideoCapturerOutputSettings.h" + +#import + +#import + +@implementation SCManagedVideoCapturerOutputSettings + +static ptrdiff_t sSCManagedVideoCapturerOutputSettingsOffsets[0]; +static BOOL sSCManagedVideoCapturerOutputSettingsHasOffsets; + +- (instancetype)initWithWidth:(CGFloat)width + height:(CGFloat)height + videoBitRate:(CGFloat)videoBitRate + audioBitRate:(CGFloat)audioBitRate + keyFrameInterval:(NSUInteger)keyFrameInterval + outputType:(SCManagedVideoCapturerOutputType)outputType +{ + self = [super init]; + if (self) { + _width = width; + _height = height; + _videoBitRate = videoBitRate; + _audioBitRate = audioBitRate; + _keyFrameInterval = keyFrameInterval; + _outputType = outputType; + } + return self; +} + +#pragma mark - NSCopying + +- (instancetype)copyWithZone:(NSZone *)zone +{ + // Immutable object, bypass copy + return self; +} + +#pragma mark - NSCoding + +- (instancetype)initWithCoder:(NSCoder *)aDecoder +{ + self = [super init]; + if (self) { + _width = [aDecoder decodeFloatForKey:@"width"]; + _height = [aDecoder decodeFloatForKey:@"height"]; + _videoBitRate = [aDecoder decodeFloatForKey:@"videoBitRate"]; + _audioBitRate = [aDecoder decodeFloatForKey:@"audioBitRate"]; + _keyFrameInterval = [[aDecoder decodeObjectForKey:@"keyFrameInterval"] unsignedIntegerValue]; + _outputType = (SCManagedVideoCapturerOutputType)[aDecoder decodeIntegerForKey:@"outputType"]; + } + return self; +} + +- (void)encodeWithCoder:(NSCoder *)aCoder +{ + [aCoder encodeFloat:_width forKey:@"width"]; + [aCoder encodeFloat:_height forKey:@"height"]; + [aCoder encodeFloat:_videoBitRate forKey:@"videoBitRate"]; + 
[aCoder encodeFloat:_audioBitRate forKey:@"audioBitRate"]; + [aCoder encodeObject:@(_keyFrameInterval) forKey:@"keyFrameInterval"]; + [aCoder encodeInteger:(NSInteger)_outputType forKey:@"outputType"]; +} + +#pragma mark - FasterCoding + +- (BOOL)preferFasterCoding +{ + return YES; +} + +- (void)encodeWithFasterCoder:(id)fasterCoder +{ + [fasterCoder encodeFloat64:_audioBitRate]; + [fasterCoder encodeFloat64:_height]; + [fasterCoder encodeUInt64:_keyFrameInterval]; + [fasterCoder encodeSInt32:_outputType]; + [fasterCoder encodeFloat64:_videoBitRate]; + [fasterCoder encodeFloat64:_width]; +} + +- (void)decodeWithFasterDecoder:(id)fasterDecoder +{ + _audioBitRate = (CGFloat)[fasterDecoder decodeFloat64]; + _height = (CGFloat)[fasterDecoder decodeFloat64]; + _keyFrameInterval = (NSUInteger)[fasterDecoder decodeUInt64]; + _outputType = (SCManagedVideoCapturerOutputType)[fasterDecoder decodeSInt32]; + _videoBitRate = (CGFloat)[fasterDecoder decodeFloat64]; + _width = (CGFloat)[fasterDecoder decodeFloat64]; +} + +- (void)setSInt32:(int32_t)val forUInt64Key:(uint64_t)key +{ + switch (key) { + case 54425104364133881ULL: + _outputType = (SCManagedVideoCapturerOutputType)val; + break; + } +} + +- (void)setUInt64:(uint64_t)val forUInt64Key:(uint64_t)key +{ + switch (key) { + case 47327990652274883ULL: + _keyFrameInterval = (NSUInteger)val; + break; + } +} + +- (void)setFloat64:(double)val forUInt64Key:(uint64_t)key +{ + switch (key) { + case 50995534680662654ULL: + _audioBitRate = (CGFloat)val; + break; + case 11656660716170763ULL: + _height = (CGFloat)val; + break; + case 29034524155663716ULL: + _videoBitRate = (CGFloat)val; + break; + case 30689178641753681ULL: + _width = (CGFloat)val; + break; + } +} + ++ (uint64_t)fasterCodingVersion +{ + return 14709152111692666517ULL; +} + ++ (uint64_t *)fasterCodingKeys +{ + static uint64_t keys[] = { + 6 /* Total */, + FC_ENCODE_KEY_TYPE(50995534680662654, FCEncodeTypeFloat64), + FC_ENCODE_KEY_TYPE(11656660716170763, 
FCEncodeTypeFloat64), + FC_ENCODE_KEY_TYPE(47327990652274883, FCEncodeTypeUInt64), + FC_ENCODE_KEY_TYPE(54425104364133881, FCEncodeTypeSInt32), + FC_ENCODE_KEY_TYPE(29034524155663716, FCEncodeTypeFloat64), + FC_ENCODE_KEY_TYPE(30689178641753681, FCEncodeTypeFloat64), + }; + return keys; +} + +#pragma mark - isEqual + +- (BOOL)isEqual:(id)object +{ + if (!SCObjectsIsEqual(self, object, &sSCManagedVideoCapturerOutputSettingsHasOffsets, + sSCManagedVideoCapturerOutputSettingsOffsets, 6, 0)) { + return NO; + } + SCManagedVideoCapturerOutputSettings *other = (SCManagedVideoCapturerOutputSettings *)object; + if (other->_width != _width) { + return NO; + } + + if (other->_height != _height) { + return NO; + } + + if (other->_videoBitRate != _videoBitRate) { + return NO; + } + + if (other->_audioBitRate != _audioBitRate) { + return NO; + } + + if (other->_keyFrameInterval != _keyFrameInterval) { + return NO; + } + + if (other->_outputType != _outputType) { + return NO; + } + + return YES; +} + +- (NSUInteger)hash +{ + NSUInteger subhashes[] = {(NSUInteger)_width, (NSUInteger)_height, (NSUInteger)_videoBitRate, + (NSUInteger)_audioBitRate, (NSUInteger)_keyFrameInterval, (NSUInteger)_outputType}; + NSUInteger result = subhashes[0]; + for (int i = 1; i < 6; i++) { + unsigned long long base = (((unsigned long long)result) << 32 | subhashes[i]); + base = (~base) + (base << 18); + base ^= (base >> 31); + base *= 21; + base ^= (base >> 11); + base += (base << 6); + base ^= (base >> 22); + result = (NSUInteger)base; + } + return result; +} + +#pragma mark - Print description in console: lldb> po #{variable name} + +- (NSString *)description +{ + NSMutableString *desc = [NSMutableString string]; + [desc appendString:@"{\n"]; + [desc appendFormat:@"\twidth:%@\n", [@(_width) description]]; + [desc appendFormat:@"\theight:%@\n", [@(_height) description]]; + [desc appendFormat:@"\tvideoBitRate:%@\n", [@(_videoBitRate) description]]; + [desc appendFormat:@"\taudioBitRate:%@\n", 
[@(_audioBitRate) description]]; + [desc appendFormat:@"\tkeyFrameInterval:%@\n", [@(_keyFrameInterval) description]]; + [desc appendFormat:@"\toutputType:%@\n", [@(_outputType) description]]; + [desc appendString:@"}\n"]; + + return [desc copy]; +} + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturerOutputSettings.value b/ManagedCapturer/SCManagedVideoCapturerOutputSettings.value new file mode 100644 index 0000000..a9fa2f3 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerOutputSettings.value @@ -0,0 +1,10 @@ +#import "SCManagedVideoCapturerOutputType.h" + +interface SCManagedVideoCapturerOutputSettings + CGFloat width + CGFloat height + CGFloat videoBitRate + CGFloat audioBitRate + NSUInteger keyFrameInterval + enum SCManagedVideoCapturerOutputType outputType +end \ No newline at end of file diff --git a/ManagedCapturer/SCManagedVideoCapturerOutputType.h b/ManagedCapturer/SCManagedVideoCapturerOutputType.h new file mode 100644 index 0000000..d033f4f --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerOutputType.h @@ -0,0 +1,14 @@ +// +// SCManagedVideoCapturerOutputType.h +// Snapchat +// +// Created by Chao Pang on 8/8/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. +// + +#import + +typedef NS_ENUM(NSInteger, SCManagedVideoCapturerOutputType) { + SCManagedVideoCapturerOutputTypeVideoSnap = 0, + SCManagedVideoCapturerOutputTypeVideoNote, +}; diff --git a/ManagedCapturer/SCManagedVideoCapturerTimeObserver.h b/ManagedCapturer/SCManagedVideoCapturerTimeObserver.h new file mode 100644 index 0000000..80c9def --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerTimeObserver.h @@ -0,0 +1,25 @@ +// +// SCManagedVideoCapturerTimeObserver.h +// Snapchat +// +// Created by Michel Loenngren on 4/3/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import +#import + +@class SCTimedTask; + +/* + Class keeping track of SCTimedTasks and firing them on the main thread + when needed. 
+ */ +@interface SCManagedVideoCapturerTimeObserver : NSObject + +- (void)addTimedTask:(SCTimedTask *_Nonnull)task; + +- (void)processTime:(CMTime)relativePresentationTime + sessionStartTimeDelayInSecond:(CGFloat)sessionStartTimeDelayInSecond; + +@end diff --git a/ManagedCapturer/SCManagedVideoCapturerTimeObserver.m b/ManagedCapturer/SCManagedVideoCapturerTimeObserver.m new file mode 100644 index 0000000..5b16547 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoCapturerTimeObserver.m @@ -0,0 +1,61 @@ +// +// SCManagedVideoCapturerTimeObserver.m +// Snapchat +// +// Created by Michel Loenngren on 4/3/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedVideoCapturerTimeObserver.h" + +#import "SCTimedTask.h" + +#import +#import +#import + +@implementation SCManagedVideoCapturerTimeObserver { + NSMutableArray *_tasks; + BOOL _isProcessing; +} + +- (instancetype)init +{ + if (self = [super init]) { + _tasks = [NSMutableArray new]; + _isProcessing = NO; + } + return self; +} + +- (void)addTimedTask:(SCTimedTask *_Nonnull)task +{ + SCAssert(!_isProcessing, + @"[SCManagedVideoCapturerTimeObserver] Trying to add an SCTimedTask after streaming started."); + SCAssert(CMTIME_IS_VALID(task.targetTime), + @"[SCManagedVideoCapturerTimeObserver] Trying to add an SCTimedTask with invalid time."); + [_tasks addObject:task]; + [_tasks sortUsingComparator:^NSComparisonResult(SCTimedTask *_Nonnull obj1, SCTimedTask *_Nonnull obj2) { + return (NSComparisonResult)CMTimeCompare(obj2.targetTime, obj1.targetTime); + }]; + SCLogGeneralInfo(@"[SCManagedVideoCapturerTimeObserver] Adding task: %@, task count: %lu", task, + (unsigned long)_tasks.count); +} + +- (void)processTime:(CMTime)relativePresentationTime + sessionStartTimeDelayInSecond:(CGFloat)sessionStartTimeDelayInSecond +{ + _isProcessing = YES; + SCTimedTask *last = _tasks.lastObject; + while (last && last.task && CMTimeCompare(relativePresentationTime, last.targetTime) >= 0) { + [_tasks 
removeLastObject]; + void (^task)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelay) = last.task; + last.task = nil; + runOnMainThreadAsynchronously(^{ + task(relativePresentationTime, sessionStartTimeDelayInSecond); + }); + last = _tasks.lastObject; + } +} + +@end diff --git a/ManagedCapturer/SCManagedVideoFileStreamer.h b/ManagedCapturer/SCManagedVideoFileStreamer.h new file mode 100644 index 0000000..6ede4ea --- /dev/null +++ b/ManagedCapturer/SCManagedVideoFileStreamer.h @@ -0,0 +1,26 @@ +// +// SCManagedVideoFileStreamer.h +// Snapchat +// +// Created by Alexander Grytsiuk on 3/4/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. +// + +#import + +#import +#import + +typedef void (^sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)(CVPixelBufferRef pixelBuffer); + +/** + * SCManagedVideoFileStreamer reads a video file from provided NSURL to create + * and publish video output frames. SCManagedVideoFileStreamer also conforms + * to SCManagedVideoDataSource allowing chained consumption of video frames. + */ +@interface SCManagedVideoFileStreamer : NSObject + +- (instancetype)initWithPlaybackForURL:(NSURL *)URL; +- (void)getNextPixelBufferWithCompletion:(sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)completion; + +@end diff --git a/ManagedCapturer/SCManagedVideoFileStreamer.m b/ManagedCapturer/SCManagedVideoFileStreamer.m new file mode 100644 index 0000000..aed6089 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoFileStreamer.m @@ -0,0 +1,299 @@ +// +// SCManagedVideoFileStreamer.m +// Snapchat +// +// Created by Alexander Grytsiuk on 3/4/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. 
+// + +#import "SCManagedVideoFileStreamer.h" + +#import "SCManagedCapturePreviewLayerController.h" + +#import +#import +#import +#import +#import + +@import AVFoundation; +@import CoreMedia; + +static char *const kSCManagedVideoFileStreamerQueueLabel = "com.snapchat.managed-video-file-streamer"; + +@interface SCManagedVideoFileStreamer () +@end + +@implementation SCManagedVideoFileStreamer { + SCManagedVideoDataSourceListenerAnnouncer *_announcer; + SCManagedCaptureDevicePosition _devicePosition; + sc_managed_video_file_streamer_pixel_buffer_completion_handler_t _nextPixelBufferHandler; + + id _notificationToken; + id _performer; + dispatch_semaphore_t _semaphore; + + CADisplayLink *_displayLink; + AVPlayerItemVideoOutput *_videoOutput; + AVPlayer *_player; + + BOOL _sampleBufferDisplayEnabled; + id _sampleBufferDisplayController; +} + +@synthesize isStreaming = _isStreaming; +@synthesize performer = _performer; +@synthesize videoOrientation = _videoOrientation; + +- (instancetype)initWithPlaybackForURL:(NSURL *)URL +{ + SCTraceStart(); + self = [super init]; + if (self) { + _videoOrientation = AVCaptureVideoOrientationLandscapeRight; + _announcer = [[SCManagedVideoDataSourceListenerAnnouncer alloc] init]; + _semaphore = dispatch_semaphore_create(1); + _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoFileStreamerQueueLabel + qualityOfService:QOS_CLASS_UNSPECIFIED + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextStories]; + + // Set up CADisplayLink, which will invoke displayLinkCallback: at every vsync. 
+ _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)]; + [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode]; + [_displayLink setPaused:YES]; + + // Prepare player + _player = [[SCPlayer alloc] initWithPlayerDomain:SCPlayerDomainCameraFileStreamer URL:URL]; +#if TARGET_IPHONE_SIMULATOR + _player.volume = 0.0; +#endif + // Configure output + [self configureOutput]; + } + return self; +} + +- (void)addSampleBufferDisplayController:(id)sampleBufferDisplayController +{ + _sampleBufferDisplayController = sampleBufferDisplayController; +} + +- (void)setSampleBufferDisplayEnabled:(BOOL)sampleBufferDisplayEnabled +{ + _sampleBufferDisplayEnabled = sampleBufferDisplayEnabled; + SCLogGeneralInfo(@"[SCManagedVideoFileStreamer] sampleBufferDisplayEnabled set to:%d", _sampleBufferDisplayEnabled); +} + +- (void)setKeepLateFrames:(BOOL)keepLateFrames +{ + // Do nothing +} + +- (BOOL)getKeepLateFrames +{ + // return default NO value + return NO; +} + +- (void)waitUntilSampleBufferDisplayed:(dispatch_queue_t)queue completionHandler:(dispatch_block_t)completionHandler +{ + SCAssert(queue, @"callback queue must be provided"); + SCAssert(completionHandler, @"completion handler must be provided"); + dispatch_async(queue, completionHandler); +} + +- (void)startStreaming +{ + SCTraceStart(); + if (!_isStreaming) { + _isStreaming = YES; + [self addDidPlayToEndTimeNotificationForPlayerItem:_player.currentItem]; + [_player play]; + } +} + +- (void)stopStreaming +{ + SCTraceStart(); + if (_isStreaming) { + _isStreaming = NO; + [_player pause]; + [self removePlayerObservers]; + } +} + +- (void)pauseStreaming +{ + [self stopStreaming]; +} + +- (void)addListener:(id)listener +{ + SCTraceStart(); + [_announcer addListener:listener]; +} + +- (void)removeListener:(id)listener +{ + SCTraceStart(); + [_announcer removeListener:listener]; +} + +- (void)setAsOutput:(AVCaptureSession *)session 
devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + _devicePosition = devicePosition; +} + +- (void)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + _devicePosition = devicePosition; +} + +- (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation +{ + _videoOrientation = videoOrientation; +} + +- (void)removeAsOutput:(AVCaptureSession *)session +{ + // Ignored +} + +- (void)setVideoStabilizationEnabledIfSupported:(BOOL)videoStabilizationIfSupported +{ + // Ignored +} + +- (void)beginConfiguration +{ + // Ignored +} + +- (void)commitConfiguration +{ + // Ignored +} + +- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest +{ + // Ignored +} + +#pragma mark - AVPlayerItemOutputPullDelegate + +- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender +{ + if (![_videoOutput hasNewPixelBufferForItemTime:CMTimeMake(1, 10)]) { + [self configureOutput]; + } + [_displayLink setPaused:NO]; +} + +#pragma mark - Internal + +- (void)displayLinkCallback:(CADisplayLink *)sender +{ + CFTimeInterval nextVSync = [sender timestamp] + [sender duration]; + + CMTime time = [_videoOutput itemTimeForHostTime:nextVSync]; + if (dispatch_semaphore_wait(_semaphore, DISPATCH_TIME_NOW) == 0) { + [_performer perform:^{ + if ([_videoOutput hasNewPixelBufferForItemTime:time]) { + CVPixelBufferRef pixelBuffer = [_videoOutput copyPixelBufferForItemTime:time itemTimeForDisplay:NULL]; + if (pixelBuffer != NULL) { + if (_nextPixelBufferHandler) { + _nextPixelBufferHandler(pixelBuffer); + _nextPixelBufferHandler = nil; + } else { + CMSampleBufferRef sampleBuffer = + [self createSampleBufferFromPixelBuffer:pixelBuffer + presentationTime:CMTimeMake(CACurrentMediaTime() * 1000, 1000)]; + if (sampleBuffer) { + if (_sampleBufferDisplayEnabled) { + [_sampleBufferDisplayController enqueueSampleBuffer:sampleBuffer]; + } + [_announcer managedVideoDataSource:self + didOutputSampleBuffer:sampleBuffer + devicePosition:_devicePosition]; + 
CFRelease(sampleBuffer); + } + } + CVBufferRelease(pixelBuffer); + } + } + dispatch_semaphore_signal(_semaphore); + }]; + } +} + +- (CMSampleBufferRef)createSampleBufferFromPixelBuffer:(CVPixelBufferRef)pixelBuffer presentationTime:(CMTime)time +{ + CMSampleBufferRef sampleBuffer = NULL; + CMVideoFormatDescriptionRef formatDesc = NULL; + + OSStatus err = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &formatDesc); + if (err != noErr) { + return NULL; + } + + CMSampleTimingInfo sampleTimingInfo = {kCMTimeInvalid, time, kCMTimeInvalid}; + CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, formatDesc, + &sampleTimingInfo, &sampleBuffer); + + CFRelease(formatDesc); + + return sampleBuffer; +} + +- (void)configureOutput +{ + // Remove old output + if (_videoOutput) { + [[_player currentItem] removeOutput:_videoOutput]; + } + + // Setup AVPlayerItemVideoOutput with the required pixelbuffer attributes. + _videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:@{ + (id) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) + }]; + _videoOutput.suppressesPlayerRendering = YES; + [_videoOutput setDelegate:self queue:_performer.queue]; + + // Add new output + [[_player currentItem] addOutput:_videoOutput]; + [_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:1.0 / 30.0]; +} + +- (void)getNextPixelBufferWithCompletion:(sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)completion +{ + _nextPixelBufferHandler = completion; +} + +- (void)addDidPlayToEndTimeNotificationForPlayerItem:(AVPlayerItem *)item +{ + if (_notificationToken) { + _notificationToken = nil; + } + + _player.actionAtItemEnd = AVPlayerActionAtItemEndNone; + _notificationToken = + [[NSNotificationCenter defaultCenter] addObserverForName:AVPlayerItemDidPlayToEndTimeNotification + object:item + queue:[NSOperationQueue mainQueue] + usingBlock:^(NSNotification 
*note) { + [[_player currentItem] seekToTime:kCMTimeZero]; + }]; +} + +- (void)removePlayerObservers +{ + if (_notificationToken) { + [[NSNotificationCenter defaultCenter] removeObserver:_notificationToken + name:AVPlayerItemDidPlayToEndTimeNotification + object:_player.currentItem]; + _notificationToken = nil; + } +} + +@end diff --git a/ManagedCapturer/SCManagedVideoFrameSampler.h b/ManagedCapturer/SCManagedVideoFrameSampler.h new file mode 100644 index 0000000..69fa80c --- /dev/null +++ b/ManagedCapturer/SCManagedVideoFrameSampler.h @@ -0,0 +1,22 @@ +// +// SCManagedVideoFrameSampler.h +// Snapchat +// +// Created by Michel Loenngren on 3/10/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedCapturerListener.h" + +#import + +/** + Allows consumer to register a block to sample the next CMSampleBufferRef and + automatically leverages Core image to convert the pixel buffer to a UIImage. + Returned image will be a copy. + */ +@interface SCManagedVideoFrameSampler : NSObject + +- (void)sampleNextFrame:(void (^)(UIImage *frame, CMTime presentationTime))completeBlock; + +@end diff --git a/ManagedCapturer/SCManagedVideoFrameSampler.m b/ManagedCapturer/SCManagedVideoFrameSampler.m new file mode 100644 index 0000000..1d0eb62 --- /dev/null +++ b/ManagedCapturer/SCManagedVideoFrameSampler.m @@ -0,0 +1,65 @@ +// +// SCManagedVideoFrameSampler.m +// Snapchat +// +// Created by Michel Loenngren on 3/10/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. 
//
//

#import "SCManagedVideoFrameSampler.h"

// NOTE(review): two angle-bracket #import targets were lost in transport here —
// restore them from the original file.
#import
#import

@import CoreImage;
@import ImageIO;

@interface SCManagedVideoFrameSampler ()

@property (nonatomic, copy) void (^frameSampleBlock)(UIImage *, CMTime);
@property (nonatomic, strong) CIContext *ciContext;

@end

@implementation SCManagedVideoFrameSampler

/// Registers `completeBlock` to be fired once, with the next appended frame.
- (void)sampleNextFrame:(void (^)(UIImage *, CMTime))completeBlock
{
    _frameSampleBlock = completeBlock;
}

#pragma mark - SCManagedCapturerListener

/// Consumes one appended sample buffer: converts its pixel buffer into a
/// UIImage (oriented .Right) and delivers it, together with the buffer's
/// presentation timestamp, to the pending block on the main thread.
- (void)managedCapturer:(id)managedCapturer
    didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
                sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata
{
    // Grab-and-clear so each registered block fires at most once.
    void (^pendingBlock)(UIImage *, CMTime) = _frameSampleBlock;
    _frameSampleBlock = nil;
    if (!pendingBlock) {
        return;
    }

    CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    UIImage *frame;
    if (imageBuffer) {
        CGImageRef cgImage = SCCreateCGImageFromPixelBufferRef(imageBuffer);
        frame = [[UIImage alloc] initWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationRight];
        CGImageRelease(cgImage);
    }
    // `frame` stays nil when the buffer carries no image data; the block is
    // invoked regardless, matching the registered contract.
    runOnMainThreadAsynchronously(^{
        pendingBlock(frame, presentationTime);
    });
}

/// Lazily created Core Image context (not referenced by the code visible in
/// this file chunk).
- (CIContext *)ciContext
{
    if (!_ciContext) {
        _ciContext = [CIContext context];
    }
    return _ciContext;
}

@end
diff --git a/ManagedCapturer/SCManagedVideoNoSoundLogger.h b/ManagedCapturer/SCManagedVideoNoSoundLogger.h
new file mode 100644
index 0000000..23e5772
--- /dev/null
+++ b/ManagedCapturer/SCManagedVideoNoSoundLogger.h
//
// SCManagedVideoNoSoundLogger.h
// Snapchat
//
// Created by Pinlin Chen on 15/07/2017.
//
//

// NOTE(review): three angle-bracket #import targets were lost in transport
// here — restore them from the original file.
#import

#import
#import

@protocol SCManiphestTicketCreator;

/// Collects per-recording state (audio errors, lens activity, retry results)
/// and logs "video has no sound" diagnostics.
@interface SCManagedVideoNoSoundLogger : NSObject

@property (nonatomic, strong) NSError *audioSessionError;
@property (nonatomic, strong) NSError *audioQueueError;
@property (nonatomic, strong) NSError *assetWriterError;
@property (nonatomic, assign) BOOL retryAudioQueueSuccess;
@property (nonatomic, assign) BOOL retryAudioQueueSuccessSetDataSource;
@property (nonatomic, strong) NSString *brokenMicCodeType;
@property (nonatomic, assign) BOOL lenseActiveWhileRecording;
@property (nonatomic, strong) NSString *activeLensId;
@property (nonatomic, assign) CMTime firstWrittenAudioBufferDelay;
@property (nonatomic, assign) BOOL audioQueueStarted;

SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithTicketCreator:(id)ticketCreator;

/* Used to count how many no-sound issues we have fixed */
// Call at the place where the AVPlayer leak has been fixed
+ (void)startCountingVideoNoSoundHaveBeenFixed;

/* Used to report the details of a new no-sound issue */
// Resets all the recording-error properties
- (void)resetAll;
// Logs if the audio track is empty
- (void)checkVideoFileAndLogIfNeeded:(NSURL *)videoURL;
// Called by AVCameraViewController when a lens resumes audio
- (void)managedLensesProcessorDidCallResumeAllSounds;

@end
diff --git a/ManagedCapturer/SCManagedVideoNoSoundLogger.m b/ManagedCapturer/SCManagedVideoNoSoundLogger.m
new file mode 100644
index 0000000..f0a5dd0
--- /dev/null
+++ b/ManagedCapturer/SCManagedVideoNoSoundLogger.m
//
// SCManagedVideoNoSoundLogger.m
// Snapchat
//
// Created by Pinlin Chen on 15/07/2017.
//
//

#import "SCManagedVideoNoSoundLogger.h"

#import "SCManagedCapturer.h"
#import "SCManiphestTicketCreator.h"

// NOTE(review): nine angle-bracket #import targets were lost in transport
// here — restore them from the original file.

@import AVFoundation;

// Set once the app session has proven the AVPlayer-leak fix is active; after
// that, "have_been_fixed" events are logged for videos that do contain audio.
static BOOL s_startCountingVideoNoSoundFixed;
// Count the number of no sound errors for an App session
static NSUInteger s_noSoundCaseCount = 0;

@interface SCManagedVideoNoSoundLogger () {
    BOOL _isAudioSessionDeactivated;
    int _lenseResumeCount;
}

@property (nonatomic) id<SCManiphestTicketCreator> ticketCreator;

@end

@implementation SCManagedVideoNoSoundLogger

/**
 Designated (and only public) initializer.

 BUGFIX(review): this used to call [super init] directly, skipping the setup
 in -init — the SCAudioSession activate/deactivate notification observers were
 never registered and firstWrittenAudioBufferDelay stayed zeroed instead of
 kCMTimeInvalid (the header marks plain init/new unavailable, so this path was
 the only way instances were built). Route through [self init] so that the
 shared setup always runs.
 */
- (instancetype)initWithTicketCreator:(id<SCManiphestTicketCreator>)ticketCreator
{
    if (self = [self init]) {
        _ticketCreator = ticketCreator;
    }
    return self;
}

/// Number of no-sound errors recorded during this app session.
+ (NSUInteger)noSoundCount
{
    return s_noSoundCaseCount;
}

+ (void)increaseNoSoundCount
{
    s_noSoundCaseCount += 1;
}

/// Marks (once per app session) that the AVPlayer-leak fix is in place.
+ (void)startCountingVideoNoSoundHaveBeenFixed
{
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        s_startCountingVideoNoSoundFixed = YES;
        SCLogGeneralInfo(@"start counting video no sound have been fixed");
    });
}

/// Stable per-app-session identifier used to correlate no-sound events.
+ (NSString *)appSessionIdForNoSound
{
    static dispatch_once_t onceToken;
    static NSString *s_AppSessionIdForNoSound = @"SCDefaultSession";
    dispatch_once(&onceToken, ^{
        s_AppSessionIdForNoSound = SCUUID();
    });
    return s_AppSessionIdForNoSound;
}

/// Logs a "fixed by player-leak fix" event, but only after
/// +startCountingVideoNoSoundHaveBeenFixed has been called.
+ (void)logVideoNoSoundHaveBeenFixedIfNeeded
{
    if (s_startCountingVideoNoSoundFixed) {
        [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
                                          parameters:@{
                                              @"have_been_fixed" : @"true",
                                              @"fixed_type" : @"player_leak",
                                              @"asset_writer_success" : @"true",
                                              @"audio_session_success" : @"true",
                                              @"audio_queue_success" : @"true",
                                          }
                                    secretParameters:nil
                                             metrics:nil];
    }
}

/// Logs that an audio-session-category mismatch was recovered by retry.
+ (void)logAudioSessionCategoryHaveBeenFixed
{
    [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
                                      parameters:@{
                                          @"have_been_fixed" : @"true",
                                          @"fixed_type" : @"audio_session_category_mismatch",
                                          @"asset_writer_success" : @"true",
                                          @"audio_session_success" : @"true",
                                          @"audio_queue_success" : @"true",
                                      }
                                secretParameters:nil
                                         metrics:nil];
}

/// Logs that a broken-microphone case of the given `type` was recovered.
+ (void)logAudioSessionBrokenMicHaveBeenFixed:(NSString *)type
{
    [[SCLogger sharedInstance]
        logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
               parameters:@{
                   @"have_been_fixed" : @"true",
                   @"fixed_type" : @"broken_microphone",
                   @"asset_writer_success" : @"true",
                   @"audio_session_success" : @"true",
                   @"audio_queue_success" : @"true",
                   @"mic_broken_type" : SC_NULL_STRING_IF_NIL(type),
                   @"audio_session_debug_info" :
                       [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @"(null)",
               }
         secretParameters:nil
                  metrics:nil];
}

/// Shared setup: registers for SCAudioSession activation notifications and
/// initializes the audio-buffer-delay marker to "invalid".
- (instancetype)init
{
    if (self = [super init]) {
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(_audioSessionWillDeactivate)
                                                     name:SCAudioSessionWillDeactivateNotification
                                                   object:nil];
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(_audioSessionDidActivate)
                                                     name:SCAudioSessionActivatedNotification
                                                   object:nil];
        _firstWrittenAudioBufferDelay = kCMTimeInvalid;
    }
    return self;
}

/// ROBUSTNESS(review): balance the observer registrations from -init.
- (void)dealloc
{
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}

/// Clears all per-recording error state; call before each new recording.
- (void)resetAll
{
    _audioQueueError = nil;
    _audioSessionError = nil;
    _assetWriterError = nil;
    _retryAudioQueueSuccess = NO;
    _retryAudioQueueSuccessSetDataSource = NO;
    _brokenMicCodeType = nil;
    _lenseActiveWhileRecording = NO;
    _lenseResumeCount = 0;
    _activeLensId = nil;
    self.firstWrittenAudioBufferDelay = kCMTimeInvalid;
}

/**
 Inspects the finished video at `videoURL` and logs either a "fixed" event
 (audio track present) or a detailed no-sound diagnostic (audio track absent).
 Track loading is performed asynchronously when the tracks are not yet
 available; the logging block then runs on the main thread.
 */
- (void)checkVideoFileAndLogIfNeeded:(NSURL *)videoURL
{
    AVURLAsset *asset = [AVURLAsset assetWithURL:videoURL];

    __block BOOL hasAudioTrack = ([asset tracksWithMediaType:AVMediaTypeAudio].count > 0);

    dispatch_block_t block = ^{

        // Log no audio issues have been fixed
        if (hasAudioTrack) {
            if (_retryAudioQueueSuccess) {
                [SCManagedVideoNoSoundLogger logAudioSessionCategoryHaveBeenFixed];
            } else if (_retryAudioQueueSuccessSetDataSource) {
                [SCManagedVideoNoSoundLogger logAudioSessionBrokenMicHaveBeenFixed:_brokenMicCodeType];
            } else {
                [SCManagedVideoNoSoundLogger logVideoNoSoundHaveBeenFixedIfNeeded];
            }
        } else {
            // Log no audio issues caused by no permission into "wont_fixed_type", won't show in Grafana
            BOOL isPermissionGranted =
                [[SCAudioSession sharedInstance] recordPermission] == AVAudioSessionRecordPermissionGranted;
            if (!isPermissionGranted) {
                [SCManagedVideoNoSoundLogger increaseNoSoundCount];
                [[SCLogger sharedInstance]
                    logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
                           parameters:@{
                               @"wont_fix_type" : @"no_permission",
                               @"no_sound_count" :
                                   [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @"(null)",
                               @"session_id" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @"(null)"
                           }
                     secretParameters:nil
                              metrics:nil];

            }
            // Log no audio issues caused by microphone occupied into "wont_fixed_type", for example Phone Call,
            // It won't show in Grafana
            // TODO: maybe we should prompt the user of these errors in the future
            else if (_audioSessionError.code == AVAudioSessionErrorInsufficientPriority ||
                     _audioQueueError.code == AVAudioSessionErrorInsufficientPriority) {
                NSDictionary *parameters = @{
                    @"wont_fix_type" : @"microphone_in_use",
                    @"asset_writer_error" : _assetWriterError ? [_assetWriterError description] : @"(null)",
                    @"audio_session_error" : _audioSessionError.userInfo ?: @"(null)",
                    @"audio_queue_error" : _audioQueueError.userInfo ?: @"(null)",
                    @"audio_session_deactivated" : _isAudioSessionDeactivated ? @"true" : @"false",
                    @"audio_session_debug_info" :
                        [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @"(null)",
                    @"no_sound_count" : [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @"(null)",
                    @"session_id" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @"(null)"
                };

                [SCManagedVideoNoSoundLogger increaseNoSoundCount];
                [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
                                                  parameters:parameters
                                            secretParameters:nil
                                                     metrics:nil];
                [_ticketCreator createAndFileBetaReport:JSONStringSerializeObjectForLogging(parameters)];
            } else {
                // Log other new no audio issues, use "have_been_fixed=false" to show in Grafana
                NSDictionary *parameters = @{
                    @"have_been_fixed" : @"false",
                    @"asset_writer_error" : _assetWriterError ? [_assetWriterError description] : @"(null)",
                    @"audio_session_error" : _audioSessionError.userInfo ?: @"(null)",
                    @"audio_queue_error" : _audioQueueError.userInfo ?: @"(null)",
                    @"asset_writer_success" : [NSString stringWithBool:_assetWriterError == nil],
                    @"audio_session_success" : [NSString stringWithBool:_audioSessionError == nil],
                    @"audio_queue_success" : [NSString stringWithBool:_audioQueueError == nil],
                    @"audio_session_deactivated" : _isAudioSessionDeactivated ? @"true" : @"false",
                    @"video_duration" : [NSString sc_stringWithFormat:@"%f", CMTimeGetSeconds(asset.duration)],
                    @"is_audio_session_nil" :
                        [[SCAudioSession sharedInstance] noSoundCheckAudioSessionIsNil] ? @"true" : @"false",
                    @"lenses_active" : [NSString stringWithBool:self.lenseActiveWhileRecording],
                    @"active_lense_id" : self.activeLensId ?: @"(null)",
                    @"lense_audio_resume_count" : @(_lenseResumeCount),
                    @"first_audio_buffer_delay" :
                        [NSString sc_stringWithFormat:@"%f", CMTimeGetSeconds(self.firstWrittenAudioBufferDelay)],
                    @"audio_session_debug_info" :
                        [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @"(null)",
                    @"audio_queue_started" : [NSString stringWithBool:_audioQueueStarted],
                    @"no_sound_count" : [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @"(null)",
                    @"session_id" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @"(null)"
                };
                [SCManagedVideoNoSoundLogger increaseNoSoundCount];
                [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
                                                  parameters:parameters
                                            secretParameters:nil
                                                     metrics:nil];
                [_ticketCreator createAndFileBetaReport:JSONStringSerializeObjectForLogging(parameters)];
            }
        }
    };
    if (hasAudioTrack) {
        block();
    } else {
        // Wait for all tracks to be loaded, in case of error counting the metric
        [asset loadValuesAsynchronouslyForKeys:@[ @"tracks" ]
                             completionHandler:^{
                                 // Return when the tracks couldn't be loaded
                                 NSError *error = nil;
                                 if ([asset statusOfValueForKey:@"tracks" error:&error] != AVKeyValueStatusLoaded ||
                                     error != nil) {
                                     return;
                                 }

                                 // check audio track again
                                 hasAudioTrack = ([asset tracksWithMediaType:AVMediaTypeAudio].count > 0);
                                 runOnMainThreadAsynchronously(block);
                             }];
    }
}

- (void)_audioSessionWillDeactivate
{
    _isAudioSessionDeactivated = YES;
}

- (void)_audioSessionDidActivate
{
    _isAudioSessionDeactivated = NO;
}

/// Counts lens-driven audio resumes during the current recording.
- (void)managedLensesProcessorDidCallResumeAllSounds
{
    _lenseResumeCount += 1;
}

@end
diff --git a/ManagedCapturer/SCManagedVideoScanner.h b/ManagedCapturer/SCManagedVideoScanner.h
new file mode 100644
index 0000000..e2dfe72
--- /dev/null
+++ 
b/ManagedCapturer/SCManagedVideoScanner.h
@@ -0,0 +1,35 @@
//
// SCManagedVideoScanner.h
// Snapchat
//
// Created by Liu Liu on 5/5/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//

#import "SCManagedCapturer.h"
#import "SCManagedDeviceCapacityAnalyzerListener.h"

// NOTE(review): two angle-bracket #import targets (and any protocol list on
// the @interface) were lost in transport here — restore from the original.
#import

#import

@class SCScanConfiguration;

@interface SCManagedVideoScanner : NSObject

/**
 * Calling this method to start scan, scan will automatically stop when a snapcode detected
 */
- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration;

/**
 * Calling this method to stop scan immediately (it is still possible that a successful scan can happen after this is
 * called)
 */
- (void)stopScanAsynchronously;

- (instancetype)initWithMaxFrameDefaultDuration:(NSTimeInterval)maxFrameDefaultDuration
                        maxFramePassiveDuration:(NSTimeInterval)maxFramePassiveDuration
                                      restCycle:(float)restCycle;

@end
diff --git a/ManagedCapturer/SCManagedVideoScanner.m b/ManagedCapturer/SCManagedVideoScanner.m
new file mode 100644
index 0000000..36eaa7a
--- /dev/null
+++ b/ManagedCapturer/SCManagedVideoScanner.m
//
// SCManagedVideoScanner.m
// Snapchat
//
// Created by Liu Liu on 5/5/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//

#import "SCManagedVideoScanner.h"

#import "SCScanConfiguration.h"

// NOTE(review): fourteen angle-bracket #import targets were lost in transport
// here — restore them from the original file.

// In seconds
static NSTimeInterval const kDefaultScanTimeout = 60;

static const char *kSCManagedVideoScannerQueueLabel = "com.snapchat.scvideoscanningcapturechannel.video.snapcode-scan";

@interface SCManagedVideoScanner ()

@end

@implementation SCManagedVideoScanner {
    SCSnapScanner *_snapScanner;
    // Pacing gate: signaled when the scanner is ready to accept the next frame;
    // frames arriving while it is unsignaled are dropped.
    dispatch_semaphore_t _activeSemaphore;
    NSTimeInterval _maxFrameDuration; // Used to restrict how many frames the scanner processes
    NSTimeInterval _maxFrameDefaultDuration;
    NSTimeInterval _maxFramePassiveDuration;
    float _restCycleOfBusyCycle;
    NSTimeInterval _scanStartTime;
    BOOL _active;
    BOOL _shouldEmitEvent;
    dispatch_block_t _completionHandler;
    NSTimeInterval _scanTimeout;
    SCManagedCaptureDevicePosition _devicePosition;
    SCQueuePerformer *_performer;
    BOOL _adjustingFocus;
    NSArray *_codeTypes;
    NSArray *_codeTypesOld;
    sc_managed_capturer_scan_results_handler_t _scanResultsHandler;

    SCUserSession *_userSession;
}

// Designated initializer. `restCycle` (0..1) is the fraction of time the
// scanner should rest the CPU between frames; it is stored as the ratio of
// rest time to busy time.
- (instancetype)initWithMaxFrameDefaultDuration:(NSTimeInterval)maxFrameDefaultDuration
                        maxFramePassiveDuration:(NSTimeInterval)maxFramePassiveDuration
                                      restCycle:(float)restCycle
{
    SCTraceStart();
    self = [super init];
    if (self) {
        _snapScanner = [SCSnapScanner sharedInstance];
        _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoScannerQueueLabel
                                            qualityOfService:QOS_CLASS_UNSPECIFIED
                                                   queueType:DISPATCH_QUEUE_SERIAL
                                                     context:SCQueuePerformerContextCamera];
        _activeSemaphore = dispatch_semaphore_create(0);
        SCAssert(restCycle >= 0 && restCycle < 1, @"rest cycle should be between 0 to 1");
        _maxFrameDefaultDuration = maxFrameDefaultDuration;
        _maxFramePassiveDuration = maxFramePassiveDuration;
        _restCycleOfBusyCycle = restCycle / (1 - restCycle); // Give CPU time to rest
    }
    return self;
}
#pragma mark - Public methods

// Arms the scanner on its serial performer queue; safe to call repeatedly
// while already active (only refreshes the start time / configuration).
- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration
{
    SCTraceStart();
    [_performer perform:^{
        _shouldEmitEvent = YES;
        _completionHandler = nil;
        _scanResultsHandler = configuration.scanResultsHandler;
        _userSession = configuration.userSession;
        _scanTimeout = kDefaultScanTimeout;
        _maxFrameDuration = _maxFrameDefaultDuration;
        _codeTypes = [self _scanCodeTypes];
        _codeTypesOld = @[ @(SCCodeTypeSnapcode18x18Old), @(SCCodeTypeQRCode) ];

        SCTraceStart();
        // Set the scan start time properly, if we call startScan multiple times while it is active,
        // This makes sure we can scan long enough.
        _scanStartTime = CACurrentMediaTime();
        // we are not active, need to send the semaphore to start the scan
        if (!_active) {
            _active = YES;

            // Signal the semaphore that we can start scan!
            dispatch_semaphore_signal(_activeSemaphore);
        }
    }];
}

// Disarms the scanner; an in-flight frame may still produce a result.
- (void)stopScanAsynchronously
{
    SCTraceStart();
    [_performer perform:^{
        SCTraceStart();
        if (_active) {
            SCLogScanDebug(@"VideoScanner:stopScanAsynchronously turn off from active");
            _active = NO;
            _scanStartTime = 0;
            _scanResultsHandler = nil;
            _userSession = nil;
        } else {
            SCLogScanDebug(@"VideoScanner:stopScanAsynchronously off already");
        }
    }];
}

#pragma mark - Private Methods

// Dispatches a successful scan to logging and, when emission is enabled, to
// the registered results handler on the main thread. Barcode/QR results are
// gated on the corresponding feature settings of the current user session.
- (void)_handleSnapScanResult:(SCSnapScannedData *)scannedData
{
    if (scannedData.hasScannedData) {
        if (scannedData.codeType == SCCodeTypeSnapcode18x18 || scannedData.codeType == SCCodeTypeSnapcodeBitmoji ||
            scannedData.codeType == SCCodeTypeSnapcode18x18Old) {
            NSString *data = [scannedData.rawData base64EncodedString];
            NSString *version = [NSString sc_stringWithFormat:@"%i", scannedData.codeTypeMeta];
            [[SCLogger sharedInstance] logEvent:@"SNAPCODE_18x18_SCANNED_FROM_CAMERA"
                                     parameters:@{
                                         @"version" : version
                                     }
                               secretParameters:@{
                                   @"data" : data
                               }];

            if (_completionHandler != nil) {
                runOnMainThreadAsynchronously(_completionHandler);
                _completionHandler = nil;
            }
        } else if (scannedData.codeType == SCCodeTypeBarcode) {
            if (!_userSession || !_userSession.featureSettingsManager.barCodeScanEnabled) {
                return;
            }
            NSString *data = scannedData.data;
            NSString *type = [SCSnapScannedData stringFromBarcodeType:scannedData.codeTypeMeta];
            [[SCLogger sharedInstance] logEvent:@"BARCODE_SCANNED_FROM_CAMERA"
                                     parameters:@{
                                         @"type" : type
                                     }
                               secretParameters:@{
                                   @"data" : data
                               }];
        } else if (scannedData.codeType == SCCodeTypeQRCode) {
            if (!_userSession || !_userSession.featureSettingsManager.qrCodeScanEnabled) {
                return;
            }
            NSURL *url = [NSURL URLWithString:scannedData.data];
            [[SCLogger sharedInstance] logEvent:@"QR_CODE_SCANNED_FROM_CAMERA"
                                     parameters:@{
                                         @"type" : (url) ? @"url" : @"other"
                                     }
                               secretParameters:@{}];
        }

        if (_shouldEmitEvent) {
            sc_managed_capturer_scan_results_handler_t scanResultsHandler = _scanResultsHandler;
            runOnMainThreadAsynchronously(^{
                if (scanResultsHandler != nil && scannedData) {
                    SCMachineReadableCodeResult *result =
                        [SCMachineReadableCodeResult machineReadableCodeResultWithScannedData:scannedData];
                    scanResultsHandler(result);
                }
            });
        }
    }
}

- (NSArray *)_scanCodeTypes
{
    // Scan types are defined by codetypes. SnapScan will scan the frame based on codetype.
    NSMutableArray *codeTypes = [[NSMutableArray alloc]
        initWithObjects:@(SCCodeTypeSnapcode18x18), @(SCCodeTypeQRCode), @(SCCodeTypeSnapcodeBitmoji), nil];
    if (SCSearchEnableBarcodeProductSearch()) {
        [codeTypes addObject:@(SCCodeTypeBarcode)];
    }
    return [codeTypes copy];
}

#pragma mark - SCManagedVideoDataSourceListener

// Frame entry point. Uses `_activeSemaphore` as a one-slot admission gate:
// a frame is only enqueued when the semaphore is signaled, and the semaphore
// is re-signaled (possibly after a rest delay) once processing finishes.
// Frames arriving while the gate is closed are dropped.
- (void)managedVideoDataSource:(id)managedVideoDataSource
         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
                devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    SCTraceStart();
    _devicePosition = devicePosition;

    if (!_active) {
        SCLogScanDebug(@"VideoScanner: Scanner is not active");
        return;
    }
    SCLogScanDebug(@"VideoScanner: Scanner is active");

    // If we have the semaphore now, enqueue a new buffer, otherwise drop the buffer
    if (dispatch_semaphore_wait(_activeSemaphore, DISPATCH_TIME_NOW) == 0) {
        // Balanced by the CFRelease inside the performer block below.
        CFRetain(sampleBuffer);
        NSTimeInterval startTime = CACurrentMediaTime();
        [_performer perform:^{
            SCTraceStart();
            CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
            SCLogScanInfo(@"VideoScanner: Scanner will scan a frame");
            SCSnapScannedData *scannedData;

            SCLogScanInfo(@"VideoScanner:Use new scanner without false alarm check");
            scannedData = [_snapScanner scanPixelBuffer:pixelBuffer forCodeTypes:_codeTypes];

            if ([UIDevice shouldLogPerfEvents]) {
                NSInteger loadingMs = (CACurrentMediaTime() - startTime) * 1000;
                // Since there are too many unsuccessful scans, we will only log 1/10 of them for now.
                if (scannedData.hasScannedData || (!scannedData.hasScannedData && arc4random() % 10 == 0)) {
                    [[SCLogger sharedInstance] logEvent:@"SCAN_SINGLE_FRAME"
                                             parameters:@{
                                                 @"time_span" : @(loadingMs),
                                                 @"has_scanned_data" : @(scannedData.hasScannedData),
                                             }];
                }
            }

            [self _handleSnapScanResult:scannedData];
            // If it is not turned off, we will continue to scan if result is not present
            if (_active) {
                _active = !scannedData.hasScannedData;
            }

            // Clean up if result is reported for scan
            if (!_active) {
                _scanResultsHandler = nil;
                _completionHandler = nil;
            }

            CFRelease(sampleBuffer);

            NSTimeInterval currentTime = CACurrentMediaTime();
            SCLogScanInfo(@"VideoScanner:Scan time %f maxFrameDuration:%f timeout:%f", currentTime - startTime,
                          _maxFrameDuration, _scanTimeout);
            // Haven't found the scanned data yet, haven't reached maximum scan timeout yet, haven't turned this off
            // yet, ready for the next frame
            if (_active && currentTime < _scanStartTime + _scanTimeout) {
                // We've finished processing current sample buffer, ready for next one, but before that, we need to rest
                // a bit (if possible)
                if (currentTime - startTime >= _maxFrameDuration && _restCycleOfBusyCycle < FLT_MIN) {
                    // If we already reached deadline (used too much time) and don't want to rest CPU, give the signal
                    // now to grab the next frame
                    SCLogScanInfo(@"VideoScanner:Signal to get next frame for snapcode scanner");
                    dispatch_semaphore_signal(_activeSemaphore);
                } else {
                    NSTimeInterval afterTime = MAX((currentTime - startTime) * _restCycleOfBusyCycle,
                                                   _maxFrameDuration - (currentTime - startTime));
                    // If we need to wait more than 0 second, then do that, otherwise grab the next frame immediately
                    if (afterTime > 0) {
                        [_performer perform:^{
                            SCLogScanInfo(
                                @"VideoScanner:Waited and now signaling to get next frame for snapcode scanner");
                            dispatch_semaphore_signal(_activeSemaphore);
                        }
                                      after:afterTime];
                    } else {
                        SCLogScanInfo(@"VideoScanner:Now signaling to get next frame for snapcode scanner");
                        dispatch_semaphore_signal(_activeSemaphore);
                    }
                }
            } else {
                // We are not active, and not going to be active any more.
                SCLogScanInfo(@"VideoScanner:not active anymore");
                _active = NO;
                _scanResultsHandler = nil;
                _completionHandler = nil;
            }
        }];
    }
}

#pragma mark - SCManagedDeviceCapacityAnalyzerListener

// Tracks focus adjustment state on the performer queue. NOTE(review): the
// `_adjustingFocus` flag is written here but not read anywhere in this chunk.
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
              didChangeAdjustingFocus:(BOOL)adjustingFocus
{
    [_performer perform:^{
        _adjustingFocus = adjustingFocus;
    }];
}

@end
diff --git a/ManagedCapturer/SCManagedVideoStreamReporter.h b/ManagedCapturer/SCManagedVideoStreamReporter.h
new file mode 100644
index 0000000..22ef049
--- /dev/null
+++ b/ManagedCapturer/SCManagedVideoStreamReporter.h
//
// SCManagedVideoStreamReporter.h
// Snapchat
//
// Created by Liu Liu on 5/16/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//

// NOTE(review): two angle-bracket #import targets were lost in transport here.
#import

#import

@interface SCManagedVideoStreamReporter : NSObject

@end
diff --git a/ManagedCapturer/SCManagedVideoStreamReporter.m b/ManagedCapturer/SCManagedVideoStreamReporter.m
new file mode 100644
index 0000000..a0addeb
--- /dev/null
+++ b/ManagedCapturer/SCManagedVideoStreamReporter.m
//
// SCManagedVideoStreamReporter.m
// Snapchat
//
// Created by Liu Liu on 5/16/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//
//

#import "SCManagedVideoStreamReporter.h"

// NOTE(review): two angle-bracket #import targets were lost in transport here.
#import
#import

// Minimum interval, in seconds, between emitted log lines.
static NSTimeInterval const SCManagedVideoStreamReporterInterval = 10;

@implementation SCManagedVideoStreamReporter {
    // Counters accumulated since the last report; reset on each emit.
    NSUInteger _droppedSampleBuffers;
    NSUInteger _outputSampleBuffers;
    NSTimeInterval _lastReportTime;
}

- (instancetype)init
{
    self = [super init];
    if (self) {
        _lastReportTime = CACurrentMediaTime();
    }
    return self;
}

// Emits one summary log line (and resets the counters) at most once per
// SCManagedVideoStreamReporterInterval.
- (void)_reportIfNeeded
{
    NSTimeInterval currentTime = CACurrentMediaTime();
    if (currentTime - _lastReportTime > SCManagedVideoStreamReporterInterval) {
        SCLogGeneralInfo(@"Time: (%.3f - %.3f], Video Streamer Dropped %tu, Output %tu", _lastReportTime, currentTime,
                         _droppedSampleBuffers, _outputSampleBuffers);
        _droppedSampleBuffers = _outputSampleBuffers = 0;
        _lastReportTime = currentTime;
    }
}

// SCManagedVideoDataSourceListener: counts delivered frames.
- (void)managedVideoDataSource:(id)managedVideoDataSource
         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
                devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    ++_outputSampleBuffers;
    [self _reportIfNeeded];
}

// SCManagedVideoDataSourceListener: counts dropped frames.
- (void)managedVideoDataSource:(id)managedVideoDataSource
           didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
                devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    ++_droppedSampleBuffers;
    [self _reportIfNeeded];
}

@end
diff --git a/ManagedCapturer/SCManagedVideoStreamer.h b/ManagedCapturer/SCManagedVideoStreamer.h
new file mode 100644
index 0000000..8432d12
--- /dev/null
+++ b/ManagedCapturer/SCManagedVideoStreamer.h
//
// SCManagedVideoStreamer.h
// Snapchat
//
// Created by Liu Liu on 4/30/15.
// Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCManagedVideoARDataSource.h"

// NOTE(review): three angle-bracket #import targets were lost in transport here.
#import

#import
#import

@class ARSession;

/**
 * SCManagedVideoStreamer uses the current AVCaptureSession to create
 * and publish video output frames.
SCManagedVideoStreamer also conforms + * to SCManagedVideoDataSource allowing chained consumption of video frames. + */ +@interface SCManagedVideoStreamer : NSObject + +- (instancetype)initWithSession:(AVCaptureSession *)session + devicePosition:(SCManagedCaptureDevicePosition)devicePosition; + +- (instancetype)initWithSession:(AVCaptureSession *)session + arSession:(ARSession *)arSession + devicePosition:(SCManagedCaptureDevicePosition)devicePosition NS_AVAILABLE_IOS(11_0); + +- (void)setupWithSession:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition; + +- (void)setupWithARSession:(ARSession *)arSession NS_AVAILABLE_IOS(11_0); + +@end diff --git a/ManagedCapturer/SCManagedVideoStreamer.m b/ManagedCapturer/SCManagedVideoStreamer.m new file mode 100644 index 0000000..83bfa5e --- /dev/null +++ b/ManagedCapturer/SCManagedVideoStreamer.m @@ -0,0 +1,823 @@ +// +// SCManagedVideoStreamer.m +// Snapchat +// +// Created by Liu Liu on 4/30/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. +// + +#import "SCManagedVideoStreamer.h" + +#import "ARConfiguration+SCConfiguration.h" +#import "SCCameraTweaks.h" +#import "SCCapturerDefines.h" +#import "SCLogger+Camera.h" +#import "SCManagedCapturePreviewLayerController.h" +#import "SCMetalUtils.h" +#import "SCProcessingPipeline.h" +#import "SCProcessingPipelineBuilder.h" + +#import +#import +#import +#import +#import +#import + +#import + +#import +#import + +@import ARKit; +@import AVFoundation; + +#define SCLogVideoStreamerInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__) +#define SCLogVideoStreamerWarning(fmt, ...) SCLogCoreCameraWarning(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__) +#define SCLogVideoStreamerError(fmt, ...) 
SCLogCoreCameraError(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__) + +static NSInteger const kSCCaptureFrameRate = 30; +static CGFloat const kSCLogInterval = 3.0; +static char *const kSCManagedVideoStreamerQueueLabel = "com.snapchat.managed-video-streamer"; +static char *const kSCManagedVideoStreamerCallbackQueueLabel = "com.snapchat.managed-video-streamer.dequeue"; +static NSTimeInterval const kSCManagedVideoStreamerMaxAllowedLatency = 1; // Drop the frame if it is 1 second late. + +static NSTimeInterval const kSCManagedVideoStreamerStalledDisplay = + 5; // If the frame is not updated for 5 seconds, it is considered to be stalled. + +static NSTimeInterval const kSCManagedVideoStreamerARSessionFramerateCap = + 1.0 / (kSCCaptureFrameRate + 1); // Restrict ARSession to 30fps +static int32_t const kSCManagedVideoStreamerMaxProcessingBuffers = 15; + +@interface SCManagedVideoStreamer () + +@property (nonatomic, strong) AVCaptureSession *captureSession; + +@end + +@implementation SCManagedVideoStreamer { + AVCaptureVideoDataOutput *_videoDataOutput; + AVCaptureDepthDataOutput *_depthDataOutput NS_AVAILABLE_IOS(11_0); + AVCaptureDataOutputSynchronizer *_dataOutputSynchronizer NS_AVAILABLE_IOS(11_0); + BOOL _performingConfigurations; + SCManagedCaptureDevicePosition _devicePosition; + BOOL _videoStabilizationEnabledIfSupported; + SCManagedVideoDataSourceListenerAnnouncer *_announcer; + + BOOL _sampleBufferDisplayEnabled; + id _sampleBufferDisplayController; + dispatch_block_t _flushOutdatedPreviewBlock; + NSMutableArray *_waitUntilSampleBufferDisplayedBlocks; + SCProcessingPipeline *_processingPipeline; + + NSTimeInterval _lastDisplayedFrameTimestamp; +#ifdef SC_USE_ARKIT_FACE + NSTimeInterval _lastDisplayedDepthFrameTimestamp; +#endif + + BOOL _depthCaptureEnabled; + CGPoint _portraitModePointOfInterest; + + // For sticky video tweaks + BOOL _keepLateFrames; + SCQueuePerformer *_callbackPerformer; + atomic_int _processingBuffersCount; +} + +@synthesize isStreaming = 
_isStreaming; +@synthesize performer = _performer; +@synthesize currentFrame = _currentFrame; +@synthesize fieldOfView = _fieldOfView; +#ifdef SC_USE_ARKIT_FACE +@synthesize lastDepthData = _lastDepthData; +#endif +@synthesize videoOrientation = _videoOrientation; + +- (instancetype)initWithSession:(AVCaptureSession *)session + devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + SCTraceStart(); + self = [super init]; + if (self) { + _sampleBufferDisplayEnabled = YES; + _announcer = [[SCManagedVideoDataSourceListenerAnnouncer alloc] init]; + // We discard frames to support lenses in real time + _keepLateFrames = NO; + _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoStreamerQueueLabel + qualityOfService:QOS_CLASS_USER_INTERACTIVE + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCamera]; + + _videoOrientation = AVCaptureVideoOrientationLandscapeRight; + + [self setupWithSession:session devicePosition:devicePosition]; + SCLogVideoStreamerInfo(@"init with position:%lu", (unsigned long)devicePosition); + } + return self; +} + +- (instancetype)initWithSession:(AVCaptureSession *)session + arSession:(ARSession *)arSession + devicePosition:(SCManagedCaptureDevicePosition)devicePosition NS_AVAILABLE_IOS(11_0) +{ + self = [self initWithSession:session devicePosition:devicePosition]; + if (self) { + [self setupWithARSession:arSession]; + self.currentFrame = nil; +#ifdef SC_USE_ARKIT_FACE + self.lastDepthData = nil; +#endif + } + return self; +} + +- (AVCaptureVideoDataOutput *)_newVideoDataOutput +{ + AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init]; + // All inbound frames are going to be the native format of the camera avoid + // any need for transcoding. 
+ output.videoSettings = + @{(NSString *) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) }; + return output; +} + +- (void)setupWithSession:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + [self stopStreaming]; + self.captureSession = session; + _devicePosition = devicePosition; + + _videoDataOutput = [self _newVideoDataOutput]; + if (SCDeviceSupportsMetal()) { + // We default to start the streaming if the Metal is supported at startup time. + _isStreaming = YES; + // Set the sample buffer delegate before starting it. + [_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue]; + } + + if ([session canAddOutput:_videoDataOutput]) { + [session addOutput:_videoDataOutput]; + [self _enableVideoMirrorForDevicePosition:devicePosition]; + } + + if (SCCameraTweaksEnablePortraitModeButton()) { + if (@available(iOS 11.0, *)) { + _depthDataOutput = [[AVCaptureDepthDataOutput alloc] init]; + [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO]; + if ([session canAddOutput:_depthDataOutput]) { + [session addOutput:_depthDataOutput]; + [_depthDataOutput setDelegate:self callbackQueue:_performer.queue]; + } + _depthCaptureEnabled = NO; + } + _portraitModePointOfInterest = CGPointMake(0.5, 0.5); + } + + [self setVideoStabilizationEnabledIfSupported:YES]; +} + +- (void)setupWithARSession:(ARSession *)arSession NS_AVAILABLE_IOS(11_0) +{ + arSession.delegateQueue = _performer.queue; + arSession.delegate = self; +} + +- (void)addSampleBufferDisplayController:(id)sampleBufferDisplayController +{ + [_performer perform:^{ + _sampleBufferDisplayController = sampleBufferDisplayController; + SCLogVideoStreamerInfo(@"add sampleBufferDisplayController:%@", _sampleBufferDisplayController); + }]; +} + +- (void)setSampleBufferDisplayEnabled:(BOOL)sampleBufferDisplayEnabled +{ + [_performer perform:^{ + _sampleBufferDisplayEnabled = sampleBufferDisplayEnabled; 
+ SCLogVideoStreamerInfo(@"sampleBufferDisplayEnabled set to:%d", _sampleBufferDisplayEnabled); + }]; +} + +- (void)waitUntilSampleBufferDisplayed:(dispatch_queue_t)queue completionHandler:(dispatch_block_t)completionHandler +{ + SCAssert(queue, @"callback queue must be provided"); + SCAssert(completionHandler, @"completion handler must be provided"); + SCLogVideoStreamerInfo(@"waitUntilSampleBufferDisplayed queue:%@ completionHandler:%p isStreaming:%d", queue, + completionHandler, _isStreaming); + if (_isStreaming) { + [_performer perform:^{ + if (!_waitUntilSampleBufferDisplayedBlocks) { + _waitUntilSampleBufferDisplayedBlocks = [NSMutableArray array]; + } + [_waitUntilSampleBufferDisplayedBlocks addObject:@[ queue, completionHandler ]]; + SCLogVideoStreamerInfo(@"waitUntilSampleBufferDisplayed add block:%p", completionHandler); + }]; + } else { + dispatch_async(queue, completionHandler); + } +} + +- (void)startStreaming +{ + SCTraceStart(); + SCLogVideoStreamerInfo(@"start streaming. 
_isStreaming:%d", _isStreaming); + if (!_isStreaming) { + _isStreaming = YES; + [self _cancelFlushOutdatedPreview]; + if (@available(ios 11.0, *)) { + if (_depthCaptureEnabled) { + [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:YES]; + } + } + [_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue]; + } +} + +- (void)setAsOutput:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + SCTraceStart(); + if ([session canAddOutput:_videoDataOutput]) { + SCLogVideoStreamerError(@"add videoDataOutput:%@", _videoDataOutput); + [session addOutput:_videoDataOutput]; + [self _enableVideoMirrorForDevicePosition:devicePosition]; + } else { + SCLogVideoStreamerError(@"cannot add videoDataOutput:%@ to session:%@", _videoDataOutput, session); + } + [self _enableVideoStabilizationIfSupported]; +} + +- (void)removeAsOutput:(AVCaptureSession *)session +{ + SCTraceStart(); + SCLogVideoStreamerInfo(@"remove videoDataOutput:%@ from session:%@", _videoDataOutput, session); + [session removeOutput:_videoDataOutput]; +} + +- (void)_cancelFlushOutdatedPreview +{ + SCLogVideoStreamerInfo(@"cancel flush outdated preview:%p", _flushOutdatedPreviewBlock); + if (_flushOutdatedPreviewBlock) { + dispatch_block_cancel(_flushOutdatedPreviewBlock); + _flushOutdatedPreviewBlock = nil; + } +} + +- (SCQueuePerformer *)callbackPerformer +{ + // If sticky video tweak is on, use a separated performer queue + if (_keepLateFrames) { + if (!_callbackPerformer) { + _callbackPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoStreamerCallbackQueueLabel + qualityOfService:QOS_CLASS_USER_INTERACTIVE + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCamera]; + } + return _callbackPerformer; + } + return _performer; +} + +- (void)pauseStreaming +{ + SCTraceStart(); + SCLogVideoStreamerInfo(@"pauseStreaming isStreaming:%d", _isStreaming); + if (_isStreaming) { + _isStreaming = NO; + 
[_videoDataOutput setSampleBufferDelegate:nil queue:NULL]; + if (@available(ios 11.0, *)) { + if (_depthCaptureEnabled) { + [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO]; + } + } + @weakify(self); + _flushOutdatedPreviewBlock = dispatch_block_create(0, ^{ + SCLogVideoStreamerInfo(@"execute flushOutdatedPreviewBlock"); + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + [self->_sampleBufferDisplayController flushOutdatedPreview]; + }); + [_performer perform:_flushOutdatedPreviewBlock + after:SCCameraTweaksEnableKeepLastFrameOnCamera() ? kSCManagedVideoStreamerStalledDisplay : 0]; + [_performer perform:^{ + [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed]; + }]; + } +} + +- (void)stopStreaming +{ + SCTraceStart(); + SCLogVideoStreamerInfo(@"stopStreaming isStreaming:%d", _isStreaming); + if (_isStreaming) { + _isStreaming = NO; + [_videoDataOutput setSampleBufferDelegate:nil queue:NULL]; + if (@available(ios 11.0, *)) { + if (_depthCaptureEnabled) { + [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO]; + } + } + } + [self _cancelFlushOutdatedPreview]; + [_performer perform:^{ + SCLogVideoStreamerInfo(@"stopStreaming in perfome queue"); + [_sampleBufferDisplayController flushOutdatedPreview]; + [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed]; + }]; +} + +- (void)beginConfiguration +{ + SCLogVideoStreamerInfo(@"enter beginConfiguration"); + [_performer perform:^{ + SCLogVideoStreamerInfo(@"performingConfigurations set to YES"); + _performingConfigurations = YES; + }]; +} + +- (void)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + SCLogVideoStreamerInfo(@"setDevicePosition with newPosition:%lu", (unsigned long)devicePosition); + [self _enableVideoMirrorForDevicePosition:devicePosition]; + [self _enableVideoStabilizationIfSupported]; + [_performer perform:^{ + SCLogVideoStreamerInfo(@"setDevicePosition in perform queue oldPosition:%lu newPosition:%lu", 
+ (unsigned long)_devicePosition, (unsigned long)devicePosition); + if (_devicePosition != devicePosition) { + _devicePosition = devicePosition; + } + }]; +} + +- (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation +{ + SCTraceStart(); + // It is not neccessary call these changes on private queue, because is is just only data output configuration. + // It should be called from manged capturer queue to prevent lock capture session in two different(private and + // managed capturer) queues that will cause the deadlock. + SCLogVideoStreamerInfo(@"setVideoOrientation oldOrientation:%lu newOrientation:%lu", + (unsigned long)_videoOrientation, (unsigned long)videoOrientation); + _videoOrientation = videoOrientation; + AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; + connection.videoOrientation = _videoOrientation; +} + +- (void)setKeepLateFrames:(BOOL)keepLateFrames +{ + SCTraceStart(); + [_performer perform:^{ + SCTraceStart(); + if (keepLateFrames != _keepLateFrames) { + _keepLateFrames = keepLateFrames; + // Get and set corresponding queue base on keepLateFrames. + // We don't use AVCaptureVideoDataOutput.alwaysDiscardsLateVideo anymore, because it will potentially + // result in lenses regression, and we could use all 15 sample buffers by adding a separated calllback + // queue. 
+ [_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue]; + SCLogVideoStreamerInfo(@"keepLateFrames was set to:%d", keepLateFrames); + } + }]; +} + +- (void)setDepthCaptureEnabled:(BOOL)enabled NS_AVAILABLE_IOS(11_0) +{ + _depthCaptureEnabled = enabled; + [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:enabled]; + if (enabled) { + _dataOutputSynchronizer = + [[AVCaptureDataOutputSynchronizer alloc] initWithDataOutputs:@[ _videoDataOutput, _depthDataOutput ]]; + [_dataOutputSynchronizer setDelegate:self queue:_performer.queue]; + } else { + _dataOutputSynchronizer = nil; + } +} + +- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest +{ + _portraitModePointOfInterest = pointOfInterest; +} + +- (BOOL)getKeepLateFrames +{ + return _keepLateFrames; +} + +- (void)commitConfiguration +{ + SCLogVideoStreamerInfo(@"enter commitConfiguration"); + [_performer perform:^{ + SCLogVideoStreamerInfo(@"performingConfigurations set to NO"); + _performingConfigurations = NO; + }]; +} + +- (void)addListener:(id)listener +{ + SCTraceStart(); + SCLogVideoStreamerInfo(@"add listener:%@", listener); + [_announcer addListener:listener]; +} + +- (void)removeListener:(id)listener +{ + SCTraceStart(); + SCLogVideoStreamerInfo(@"remove listener:%@", listener); + [_announcer removeListener:listener]; +} + +- (void)addProcessingPipeline:(SCProcessingPipeline *)processingPipeline +{ + SCLogVideoStreamerInfo(@"enter addProcessingPipeline:%@", processingPipeline); + [_performer perform:^{ + SCLogVideoStreamerInfo(@"processingPipeline set to %@", processingPipeline); + _processingPipeline = processingPipeline; + }]; +} + +- (void)removeProcessingPipeline +{ + SCLogVideoStreamerInfo(@"enter removeProcessingPipeline"); + [_performer perform:^{ + SCLogVideoStreamerInfo(@"processingPipeline set to nil"); + _processingPipeline = nil; + }]; +} + +- (BOOL)isVideoMirrored +{ + SCTraceStart(); + AVCaptureConnection *connection = 
[_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; + return connection.isVideoMirrored; +} + +#pragma mark - Common Sample Buffer Handling + +- (void)didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + return [self didOutputSampleBuffer:sampleBuffer depthData:nil]; +} + +- (void)didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer depthData:(CVPixelBufferRef)depthDataMap +{ + // Don't send the sample buffer if we are perform configurations + if (_performingConfigurations) { + SCLogVideoStreamerError(@"didOutputSampleBuffer return because performingConfigurations is YES"); + return; + } + SC_GUARD_ELSE_RETURN([_performer isCurrentPerformer]); + + // We can't set alwaysDiscardsLateVideoFrames to YES when lens is activated because it will cause camera freezing. + // When alwaysDiscardsLateVideoFrames is set to NO, the late frames will not be dropped until it reach 15 frames, + // so we should simulate the dropping behaviour as AVFoundation do. + NSTimeInterval presentationTime = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)); + _lastDisplayedFrameTimestamp = presentationTime; + NSTimeInterval frameLatency = CACurrentMediaTime() - presentationTime; + // Log interval definied in macro LOG_INTERVAL, now is 3.0s + BOOL shouldLog = + (long)(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * kSCCaptureFrameRate) % + ((long)(kSCCaptureFrameRate * kSCLogInterval)) == + 0; + if (shouldLog) { + SCLogVideoStreamerInfo(@"didOutputSampleBuffer:%p", sampleBuffer); + } + if (_processingPipeline) { + RenderData renderData = { + .sampleBuffer = sampleBuffer, + .depthDataMap = depthDataMap, + .depthBlurPointOfInterest = + SCCameraTweaksEnablePortraitModeAutofocus() || SCCameraTweaksEnablePortraitModeTapToFocus() + ? &_portraitModePointOfInterest + : nil, + }; + // Ensure we are doing all render operations (i.e. 
accessing textures) on performer to prevent race condition + SCAssertPerformer(_performer); + sampleBuffer = [_processingPipeline render:renderData]; + + if (shouldLog) { + SCLogVideoStreamerInfo(@"rendered sampleBuffer:%p in processingPipeline:%@", sampleBuffer, + _processingPipeline); + } + } + + if (sampleBuffer && _sampleBufferDisplayEnabled) { + // Send the buffer only if it is valid, set it to be displayed immediately (See the enqueueSampleBuffer method + // header, need to get attachments array and set the dictionary). + CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES); + if (!attachmentsArray) { + SCLogVideoStreamerError(@"Error getting attachment array for CMSampleBuffer"); + } else if (CFArrayGetCount(attachmentsArray) > 0) { + CFMutableDictionaryRef attachment = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, 0); + CFDictionarySetValue(attachment, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue); + } + // Warn if frame that went through is not most recent enough. + if (frameLatency >= kSCManagedVideoStreamerMaxAllowedLatency) { + SCLogVideoStreamerWarning( + @"The sample buffer we received is too late, why? 
presentationTime:%lf frameLatency:%f", + presentationTime, frameLatency); + } + [_sampleBufferDisplayController enqueueSampleBuffer:sampleBuffer]; + if (shouldLog) { + SCLogVideoStreamerInfo(@"displayed sampleBuffer:%p in Metal", sampleBuffer); + } + + [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed]; + } + + if (shouldLog) { + SCLogVideoStreamerInfo(@"begin annoucing sampleBuffer:%p of devicePosition:%lu", sampleBuffer, + (unsigned long)_devicePosition); + } + [_announcer managedVideoDataSource:self didOutputSampleBuffer:sampleBuffer devicePosition:_devicePosition]; + if (shouldLog) { + SCLogVideoStreamerInfo(@"end annoucing sampleBuffer:%p", sampleBuffer); + } +} + +- (void)didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + if (_performingConfigurations) { + return; + } + SC_GUARD_ELSE_RETURN([_performer isCurrentPerformer]); + NSTimeInterval currentProcessingTime = CACurrentMediaTime(); + NSTimeInterval currentSampleTime = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)); + // Only logging it when sticky tweak is on, which means sticky time is too long, and AVFoundation have to drop the + // sampleBuffer + if (_keepLateFrames) { + SCLogVideoStreamerInfo(@"didDropSampleBuffer:%p timestamp:%f latency:%f", sampleBuffer, currentProcessingTime, + currentSampleTime); + } + [_announcer managedVideoDataSource:self didDropSampleBuffer:sampleBuffer devicePosition:_devicePosition]; +} + +#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate + +- (void)captureOutput:(AVCaptureOutput *)captureOutput +didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + fromConnection:(AVCaptureConnection *)connection NS_AVAILABLE_IOS(11_0) +{ + // Sticky video tweak is off, i.e. 
lenses is on, + // we use same queue for callback and processing, and let AVFoundation decide which frame should be dropped + if (!_keepLateFrames) { + [self didOutputSampleBuffer:sampleBuffer]; + } + // Sticky video tweak is on + else { + if ([_performer isCurrentPerformer]) { + // Note: there might be one frame callbacked in processing queue when switching callback queue, + // it should be fine. But if following log appears too much, it is not our design. + SCLogVideoStreamerWarning(@"The callback queue should be a separated queue when sticky tweak is on"); + } + // TODO: In sticky video v2, we should consider check free memory + if (_processingBuffersCount >= kSCManagedVideoStreamerMaxProcessingBuffers - 1) { + SCLogVideoStreamerWarning(@"processingBuffersCount reached to the max. current count:%d", + _processingBuffersCount); + [self didDropSampleBuffer:sampleBuffer]; + return; + } + atomic_fetch_add(&_processingBuffersCount, 1); + CFRetain(sampleBuffer); + // _performer should always be the processing queue + [_performer perform:^{ + [self didOutputSampleBuffer:sampleBuffer]; + CFRelease(sampleBuffer); + atomic_fetch_sub(&_processingBuffersCount, 1); + }]; + } +} + +- (void)captureOutput:(AVCaptureOutput *)captureOutput + didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer + fromConnection:(AVCaptureConnection *)connection +{ + [self didDropSampleBuffer:sampleBuffer]; +} + +#pragma mark - AVCaptureDataOutputSynchronizer (Video + Depth) + +- (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer + didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)synchronizedDataCollection + NS_AVAILABLE_IOS(11_0) +{ + AVCaptureSynchronizedDepthData *syncedDepthData = (AVCaptureSynchronizedDepthData *)[synchronizedDataCollection + synchronizedDataForCaptureOutput:_depthDataOutput]; + AVDepthData *depthData = nil; + if (syncedDepthData && !syncedDepthData.depthDataWasDropped) { + depthData = syncedDepthData.depthData; + } + + 
AVCaptureSynchronizedSampleBufferData *syncedVideoData = + (AVCaptureSynchronizedSampleBufferData *)[synchronizedDataCollection + synchronizedDataForCaptureOutput:_videoDataOutput]; + if (syncedVideoData && !syncedVideoData.sampleBufferWasDropped) { + CMSampleBufferRef videoSampleBuffer = syncedVideoData.sampleBuffer; + [self didOutputSampleBuffer:videoSampleBuffer depthData:depthData ? depthData.depthDataMap : nil]; + } +} + +#pragma mark - ARSessionDelegate + +- (void)session:(ARSession *)session cameraDidChangeTrackingState:(ARCamera *)camera NS_AVAILABLE_IOS(11_0) +{ + NSString *state = nil; + NSString *reason = nil; + switch (camera.trackingState) { + case ARTrackingStateNormal: + state = @"Normal"; + break; + case ARTrackingStateLimited: + state = @"Limited"; + break; + case ARTrackingStateNotAvailable: + state = @"Not Available"; + break; + } + switch (camera.trackingStateReason) { + case ARTrackingStateReasonNone: + reason = @"None"; + break; + case ARTrackingStateReasonInitializing: + reason = @"Initializing"; + break; + case ARTrackingStateReasonExcessiveMotion: + reason = @"Excessive Motion"; + break; + case ARTrackingStateReasonInsufficientFeatures: + reason = @"Insufficient Features"; + break; +#if SC_AT_LEAST_SDK_11_3 + case ARTrackingStateReasonRelocalizing: + reason = @"Relocalizing"; + break; +#endif + } + SCLogVideoStreamerInfo(@"ARKit changed tracking state - %@ (reason: %@)", state, reason); +} + +- (void)session:(ARSession *)session didUpdateFrame:(ARFrame *)frame NS_AVAILABLE_IOS(11_0) +{ +#ifdef SC_USE_ARKIT_FACE + // This is extremely weird, but LOOK-10251 indicates that despite the class having it defined, on some specific + // devices there are ARFrame instances that don't respond to `capturedDepthData`. + // (note: this was discovered to be due to some people staying on iOS 11 betas). 
+ AVDepthData *depth = nil; + if ([frame respondsToSelector:@selector(capturedDepthData)]) { + depth = frame.capturedDepthData; + } +#endif + + CGFloat timeSince = frame.timestamp - _lastDisplayedFrameTimestamp; + // Don't deliver more than 30 frames per sec + BOOL framerateMinimumElapsed = timeSince >= kSCManagedVideoStreamerARSessionFramerateCap; + +#ifdef SC_USE_ARKIT_FACE + if (depth) { + CGFloat timeSince = frame.timestamp - _lastDisplayedDepthFrameTimestamp; + framerateMinimumElapsed |= timeSince >= kSCManagedVideoStreamerARSessionFramerateCap; + } + +#endif + + SC_GUARD_ELSE_RETURN(framerateMinimumElapsed); + +#ifdef SC_USE_ARKIT_FACE + if (depth) { + self.lastDepthData = depth; + _lastDisplayedDepthFrameTimestamp = frame.timestamp; + } +#endif + + // Make sure that current frame is no longer being used, otherwise drop current frame. + SC_GUARD_ELSE_RETURN(self.currentFrame == nil); + + CVPixelBufferRef pixelBuffer = frame.capturedImage; + CVPixelBufferLockBaseAddress(pixelBuffer, 0); + CMTime time = CMTimeMakeWithSeconds(frame.timestamp, 1000000); + CMSampleTimingInfo timing = {kCMTimeInvalid, time, kCMTimeInvalid}; + + CMVideoFormatDescriptionRef videoInfo; + CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo); + + CMSampleBufferRef buffer; + CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, YES, nil, nil, videoInfo, &timing, &buffer); + CFRelease(videoInfo); + CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); + + self.currentFrame = frame; + [self didOutputSampleBuffer:buffer]; + [self _updateFieldOfViewWithARFrame:frame]; + + CFRelease(buffer); +} + +- (void)session:(ARSession *)session didAddAnchors:(NSArray *)anchors NS_AVAILABLE_IOS(11_0) +{ + for (ARAnchor *anchor in anchors) { + if ([anchor isKindOfClass:[ARPlaneAnchor class]]) { + SCLogVideoStreamerInfo(@"ARKit added plane anchor"); + return; + } + } +} + +- (void)session:(ARSession *)session didFailWithError:(NSError *)error 
NS_AVAILABLE_IOS(11_0) +{ + SCLogVideoStreamerError(@"ARKit session failed with error: %@. Resetting", error); + [session runWithConfiguration:[ARConfiguration sc_configurationForDevicePosition:_devicePosition]]; +} + +- (void)sessionWasInterrupted:(ARSession *)session NS_AVAILABLE_IOS(11_0) +{ + SCLogVideoStreamerWarning(@"ARKit session interrupted"); +} + +- (void)sessionInterruptionEnded:(ARSession *)session NS_AVAILABLE_IOS(11_0) +{ + SCLogVideoStreamerInfo(@"ARKit interruption ended"); +} + +#pragma mark - Private methods + +- (void)_performCompletionHandlersForWaitUntilSampleBufferDisplayed +{ + for (NSArray *completion in _waitUntilSampleBufferDisplayedBlocks) { + // Call the completion handlers. + dispatch_async(completion[0], completion[1]); + } + [_waitUntilSampleBufferDisplayedBlocks removeAllObjects]; +} + +// This is the magic that ensures the VideoDataOutput will have the correct +// orientation. +- (void)_enableVideoMirrorForDevicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + SCLogVideoStreamerInfo(@"enable video mirror for device position:%lu", (unsigned long)devicePosition); + AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; + connection.videoOrientation = _videoOrientation; + if (devicePosition == SCManagedCaptureDevicePositionFront) { + connection.videoMirrored = YES; + } +} + +- (void)_enableVideoStabilizationIfSupported +{ + SCTraceStart(); + if (!SCCameraTweaksEnableVideoStabilization()) { + SCLogVideoStreamerWarning(@"SCCameraTweaksEnableVideoStabilization is NO, won't enable video stabilization"); + return; + } + + AVCaptureConnection *videoConnection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; + if (!videoConnection) { + SCLogVideoStreamerError(@"cannot get videoConnection from videoDataOutput:%@", videoConnection); + return; + } + // Set the video stabilization mode to auto. Default is off. 
+ if ([videoConnection isVideoStabilizationSupported]) { + videoConnection.preferredVideoStabilizationMode = _videoStabilizationEnabledIfSupported + ? AVCaptureVideoStabilizationModeStandard + : AVCaptureVideoStabilizationModeOff; + NSDictionary *params = @{ @"iOS8_Mode" : @(videoConnection.activeVideoStabilizationMode) }; + [[SCLogger sharedInstance] logEvent:@"VIDEO_STABILIZATION_MODE" parameters:params]; + SCLogVideoStreamerInfo(@"set video stabilization mode:%ld to videoConnection:%@", + (long)videoConnection.preferredVideoStabilizationMode, videoConnection); + } else { + SCLogVideoStreamerInfo(@"video stabilization isn't supported on videoConnection:%@", videoConnection); + } +} + +- (void)setVideoStabilizationEnabledIfSupported:(BOOL)videoStabilizationIfSupported +{ + SCLogVideoStreamerInfo(@"setVideoStabilizationEnabledIfSupported:%d", videoStabilizationIfSupported); + _videoStabilizationEnabledIfSupported = videoStabilizationIfSupported; + [self _enableVideoStabilizationIfSupported]; +} + +- (void)_updateFieldOfViewWithARFrame:(ARFrame *)frame NS_AVAILABLE_IOS(11_0) +{ + SC_GUARD_ELSE_RETURN(frame.camera); + CGSize imageResolution = frame.camera.imageResolution; + matrix_float3x3 intrinsics = frame.camera.intrinsics; + float xFovDegrees = 2 * atan(imageResolution.width / (2 * intrinsics.columns[0][0])) * 180 / M_PI; + if (_fieldOfView != xFovDegrees) { + self.fieldOfView = xFovDegrees; + } +} + +- (NSString *)description +{ + return [self debugDescription]; +} + +- (NSString *)debugDescription +{ + NSDictionary *debugDict = @{ + @"_sampleBufferDisplayEnabled" : _sampleBufferDisplayEnabled ? @"Yes" : @"No", + @"_videoStabilizationEnabledIfSupported" : _videoStabilizationEnabledIfSupported ? @"Yes" : @"No", + @"_performingConfigurations" : _performingConfigurations ? @"Yes" : @"No", + @"alwaysDiscardLateVideoFrames" : _videoDataOutput.alwaysDiscardsLateVideoFrames ? 
@"Yes" : @"No" + }; + return [NSString sc_stringWithFormat:@"%@", debugDict]; +} + +@end diff --git a/ManagedCapturer/SCMetalUtils.h b/ManagedCapturer/SCMetalUtils.h new file mode 100644 index 0000000..211ada7 --- /dev/null +++ b/ManagedCapturer/SCMetalUtils.h @@ -0,0 +1,63 @@ +// +// SCMetalUtils.h +// Snapchat +// +// Created by Michel Loenngren on 7/11/17. +// +// Utility class for metal related helpers. + +#import +#if !TARGET_IPHONE_SIMULATOR +#import +#endif +#import + +#import + +SC_EXTERN_C_BEGIN + +#if !TARGET_IPHONE_SIMULATOR +extern id SCGetManagedCaptureMetalDevice(void); +#endif + +static SC_ALWAYS_INLINE BOOL SCDeviceSupportsMetal(void) +{ +#if TARGET_CPU_ARM64 + return YES; // All 64 bit system supports Metal. +#else + return NO; +#endif +} + +#if !TARGET_IPHONE_SIMULATOR +static inline id SCMetalTextureFromPixelBuffer(CVPixelBufferRef pixelBuffer, size_t planeIndex, + MTLPixelFormat pixelFormat, + CVMetalTextureCacheRef textureCache) +{ + size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex); + size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex); + CVMetalTextureRef textureRef; + if (kCVReturnSuccess != CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, pixelBuffer, + nil, pixelFormat, width, height, planeIndex, + &textureRef)) { + return nil; + } + id texture = CVMetalTextureGetTexture(textureRef); + CVBufferRelease(textureRef); + return texture; +} + +static inline void SCMetalCopyTexture(id texture, CVPixelBufferRef pixelBuffer, NSUInteger planeIndex) +{ + CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); + void *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, planeIndex); + NSUInteger bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, planeIndex); + MTLRegion region = MTLRegionMake2D(0, 0, CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex), + CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex)); + + [texture 
getBytes:baseAddress bytesPerRow:bytesPerRow fromRegion:region mipmapLevel:0]; + CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); +} +#endif + +SC_EXTERN_C_END diff --git a/ManagedCapturer/SCMetalUtils.m b/ManagedCapturer/SCMetalUtils.m new file mode 100644 index 0000000..79c58d3 --- /dev/null +++ b/ManagedCapturer/SCMetalUtils.m @@ -0,0 +1,25 @@ +// +// SCMetalUtils.m +// Snapchat +// +// Created by Michel Loenngren on 8/16/17. +// +// + +#import "SCMetalUtils.h" + +#import + +id SCGetManagedCaptureMetalDevice(void) +{ +#if !TARGET_IPHONE_SIMULATOR + SCTraceStart(); + static dispatch_once_t onceToken; + static id device; + dispatch_once(&onceToken, ^{ + device = MTLCreateSystemDefaultDevice(); + }); + return device; +#endif + return nil; +} diff --git a/ManagedCapturer/SCScanConfiguration.h b/ManagedCapturer/SCScanConfiguration.h new file mode 100644 index 0000000..738e813 --- /dev/null +++ b/ManagedCapturer/SCScanConfiguration.h @@ -0,0 +1,18 @@ +// +// SCScanConfiguration.h +// Snapchat +// +// Created by Yang Dai on 3/7/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedCapturer.h" + +#import + +@interface SCScanConfiguration : NSObject + +@property (nonatomic, strong) sc_managed_capturer_scan_results_handler_t scanResultsHandler; +@property (nonatomic, strong) SCUserSession *userSession; + +@end diff --git a/ManagedCapturer/SCScanConfiguration.m b/ManagedCapturer/SCScanConfiguration.m new file mode 100644 index 0000000..9be8200 --- /dev/null +++ b/ManagedCapturer/SCScanConfiguration.m @@ -0,0 +1,13 @@ +// +// SCScanConfiguration.m +// Snapchat +// +// Created by Yang Dai on 3/7/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. 
+//
+
+#import "SCScanConfiguration.h"
+
+@implementation SCScanConfiguration
+
+@end
diff --git a/ManagedCapturer/SCSingleFrameStreamCapturer.h b/ManagedCapturer/SCSingleFrameStreamCapturer.h
new file mode 100644
index 0000000..a154430
--- /dev/null
+++ b/ManagedCapturer/SCSingleFrameStreamCapturer.h
@@ -0,0 +1,17 @@
+//
+// SCSingleFrameStreamCapturer.h
+// Snapchat
+//
+// Created by Benjamin Hollis on 5/3/16.
+// Copyright © 2016 Snapchat, Inc. All rights reserved.
+//
+
+#import "SCCaptureCommon.h"
+
+#import <AVFoundation/AVFoundation.h>
+
+#import <Foundation/Foundation.h>
+/// One-shot stream listener: converts the first sample buffer it receives into a UIImage and fires the completion handler exactly once. NOTE(review): angle-bracket imports and the protocol conformance were stripped by extraction; restored from the .m, which implements -managedVideoDataSource:didOutputSampleBuffer:devicePosition: — confirm protocol name.
+@interface SCSingleFrameStreamCapturer : NSObject <SCManagedVideoDataSourceListener>
+- (instancetype)initWithCompletion:(sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler;
+@end
diff --git a/ManagedCapturer/SCSingleFrameStreamCapturer.m b/ManagedCapturer/SCSingleFrameStreamCapturer.m
new file mode 100644
index 0000000..38813b5
--- /dev/null
+++ b/ManagedCapturer/SCSingleFrameStreamCapturer.m
@@ -0,0 +1,103 @@
+//
+// SCSingleFrameStreamCapturer.m
+// Snapchat
+//
+// Created by Benjamin Hollis on 5/3/16.
+// Copyright © 2016 Snapchat, Inc. All rights reserved.
+//
+
+#import "SCSingleFrameStreamCapturer.h"
+
+#import "SCManagedCapturer.h"
+
+@implementation SCSingleFrameStreamCapturer {
+    sc_managed_capturer_capture_video_frame_completion_handler_t _callback; // nilled out after the first frame
+}
+
+- (instancetype)initWithCompletion:(sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler
+{
+    self = [super init];
+    if (self) {
+        _callback = completionHandler;
+    }
+    return self;
+}
+
+#pragma mark - SCManagedVideoDataSourceListener
+// NOTE(review): the protocol qualifier on the first parameter was stripped by extraction; restored — verify against SCManagedVideoDataSourceListener's declaration.
+- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
+         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+                devicePosition:(SCManagedCaptureDevicePosition)devicePosition
+{
+    if (_callback) {
+        UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
+        _callback(image);
+    }
+    _callback = nil; // one-shot: never deliver more than a single frame
+}
+
+/**
+ * Decode a CMSampleBufferRef in our native camera format (kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
+ * as set in SCManagedVideoStreamer) into a UIImage.
+ *
+ * Code from http://stackoverflow.com/a/31553521/11284
+ */
+#define clamp(a) ((a) > 255 ? 255 : ((a) < 0 ? 0 : (a)))
+// TODO: Use the transform code from SCImageProcessIdentityYUVCommand
+- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
+{
+    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+    CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
+
+    size_t width = CVPixelBufferGetWidth(imageBuffer);
+    size_t height = CVPixelBufferGetHeight(imageBuffer);
+    uint8_t *yBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
+    size_t yPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
+    uint8_t *cbCrBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
+    size_t cbCrPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1);
+
+    int bytesPerPixel = 4;
+    uint8_t *rgbBuffer = malloc(width * height * bytesPerPixel);
+
+    for (int y = 0; y < height; y++) {
+        uint8_t *rgbBufferLine = &rgbBuffer[y * width * bytesPerPixel];
+        uint8_t *yBufferLine = &yBuffer[y * yPitch];
+        uint8_t *cbCrBufferLine = &cbCrBuffer[(y >> 1) * cbCrPitch]; // chroma plane is half-height (4:2:0)
+
+        for (int x = 0; x < width; x++) {
+            int16_t yValue = yBufferLine[x]; // renamed from `y`, which shadowed the row index above
+            int16_t cb = cbCrBufferLine[x & ~1] - 128;
+            int16_t cr = cbCrBufferLine[x | 1] - 128;
+
+            uint8_t *rgbOutput = &rgbBufferLine[x * bytesPerPixel];
+
+            int16_t r = (int16_t)roundf(yValue + cr * 1.4);
+            int16_t g = (int16_t)roundf(yValue + cb * -0.343 + cr * -0.711);
+            int16_t b = (int16_t)roundf(yValue + cb * 1.765);
+
+            rgbOutput[0] = 0xff;
+            rgbOutput[1] = clamp(b);
+            rgbOutput[2] = clamp(g);
+            rgbOutput[3] = clamp(r);
+        }
+    }
+
+    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
+    CGContextRef context = CGBitmapContextCreate(rgbBuffer, width, height, 8, width * bytesPerPixel, colorSpace,
+                                                 kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipLast);
+    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
+
+    // TODO: Hardcoding UIImageOrientationRight seems cheesy
+    UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0 orientation:UIImageOrientationRight];
+
+    CGContextRelease(context);
+    CGColorSpaceRelease(colorSpace);
+    CGImageRelease(quartzImage);
+    free(rgbBuffer);
+
+    CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
+
+    return image;
+}
+
+@end
diff --git a/ManagedCapturer/SCStillImageCaptureVideoInputMethod.h b/ManagedCapturer/SCStillImageCaptureVideoInputMethod.h
new file mode 100644
index 0000000..1704e53
--- /dev/null
+++ b/ManagedCapturer/SCStillImageCaptureVideoInputMethod.h
@@ -0,0 +1,19 @@
+//
+// SCStillImageCaptureVideoInputMethod.h
+// Snapchat
+//
+// Created by Alexander Grytsiuk on 3/16/16.
+// Copyright © 2016 Snapchat, Inc. All rights reserved.
+//
+
+#import "SCManagedCapturerState.h"
+
+#import <Foundation/Foundation.h>
+
+@interface SCStillImageCaptureVideoInputMethod : NSObject
+
+- (void)captureStillImageWithCapturerState:(SCManagedCapturerState *)state
+                              successBlock:(void (^)(NSData *imageData, NSDictionary *cameraInfo,
+                                                     NSError *error))successBlock
+                              failureBlock:(void (^)(NSError *error))failureBlock;
+@end
diff --git a/ManagedCapturer/SCStillImageCaptureVideoInputMethod.m b/ManagedCapturer/SCStillImageCaptureVideoInputMethod.m
new file mode 100644
index 0000000..ea6cb05
--- /dev/null
+++ b/ManagedCapturer/SCStillImageCaptureVideoInputMethod.m
@@ -0,0 +1,140 @@
+//
+// SCStillImageCaptureVideoInputMethod.m
+// Snapchat
+//
+// Created by Alexander Grytsiuk on 3/16/16.
+// Copyright © 2016 Snapchat, Inc. All rights reserved.
+// + +#import "SCStillImageCaptureVideoInputMethod.h" + +#import "SCManagedCapturer.h" +#import "SCManagedVideoFileStreamer.h" + +typedef unsigned char uchar_t; +int clamp(int val, int low, int high) +{ + if (val < low) + val = low; + if (val > high) + val = high; + return val; +} + +void yuv2rgb(uchar_t yValue, uchar_t uValue, uchar_t vValue, uchar_t *r, uchar_t *g, uchar_t *b) +{ + double red = yValue + (1.370705 * (vValue - 128)); + double green = yValue - (0.698001 * (vValue - 128)) - (0.337633 * (uValue - 128)); + double blue = yValue + (1.732446 * (uValue - 128)); + *r = clamp(red, 0, 255); + *g = clamp(green, 0, 255); + *b = clamp(blue, 0, 255); +} + +void convertNV21DataToRGBData(int width, int height, uchar_t *nv21Data, uchar_t *rgbData, int rgbBytesPerPixel, + int rgbBytesPerRow) +{ + uchar_t *uvData = nv21Data + height * width; + for (int h = 0; h < height; h++) { + uchar_t *yRowBegin = nv21Data + h * width; + uchar_t *uvRowBegin = uvData + h / 2 * width; + uchar_t *rgbRowBegin = rgbData + rgbBytesPerRow * h; + for (int w = 0; w < width; w++) { + uchar_t *rgbPixelBegin = rgbRowBegin + rgbBytesPerPixel * w; + yuv2rgb(yRowBegin[w], uvRowBegin[w / 2 * 2], uvRowBegin[w / 2 * 2 + 1], &(rgbPixelBegin[0]), + &(rgbPixelBegin[1]), &(rgbPixelBegin[2])); + } + } +} + +@implementation SCStillImageCaptureVideoInputMethod + +- (void)captureStillImageWithCapturerState:(SCManagedCapturerState *)state + successBlock:(void (^)(NSData *imageData, NSDictionary *cameraInfo, + NSError *error))successBlock + failureBlock:(void (^)(NSError *error))failureBlock +{ + id videoDataSource = [[SCManagedCapturer sharedInstance] currentVideoDataSource]; + if ([videoDataSource isKindOfClass:[SCManagedVideoFileStreamer class]]) { + SCManagedVideoFileStreamer *videoFileStreamer = (SCManagedVideoFileStreamer *)videoDataSource; + [videoFileStreamer getNextPixelBufferWithCompletion:^(CVPixelBufferRef pixelBuffer) { + BOOL shouldFlip = state.devicePosition == 
SCManagedCaptureDevicePositionFront; +#if TARGET_IPHONE_SIMULATOR + UIImage *uiImage = [self imageWithCVPixelBuffer:pixelBuffer]; + CGImageRef videoImage = uiImage.CGImage; + UIImage *capturedImage = [UIImage + imageWithCGImage:shouldFlip ? [self flipCGImage:videoImage size:uiImage.size].CGImage : videoImage + scale:1.0 + orientation:UIImageOrientationRight]; +#else + CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer]; + CIContext *temporaryContext = [CIContext contextWithOptions:nil]; + + CGSize size = CGSizeMake(CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer)); + CGImageRef videoImage = + [temporaryContext createCGImage:ciImage fromRect:CGRectMake(0, 0, size.width, size.height)]; + + UIImage *capturedImage = + [UIImage imageWithCGImage:shouldFlip ? [self flipCGImage:videoImage size:size].CGImage : videoImage + scale:1.0 + orientation:UIImageOrientationRight]; + + CGImageRelease(videoImage); +#endif + if (successBlock) { + successBlock(UIImageJPEGRepresentation(capturedImage, 1.0), nil, nil); + } + }]; + } else { + if (failureBlock) { + failureBlock([NSError errorWithDomain:NSStringFromClass(self.class) code:-1 userInfo:nil]); + } + } +} + +- (UIImage *)flipCGImage:(CGImageRef)cgImage size:(CGSize)size +{ + UIGraphicsBeginImageContext(size); + CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, size.width, size.height), cgImage); + UIImage *image = UIGraphicsGetImageFromCurrentImageContext(); + UIGraphicsEndImageContext(); + return image; +} + +- (UIImage *)imageWithCVPixelBuffer:(CVPixelBufferRef)imageBuffer +{ + CVPixelBufferLockBaseAddress(imageBuffer, 0); + + size_t width = CVPixelBufferGetWidth(imageBuffer); + size_t height = CVPixelBufferGetHeight(imageBuffer); + size_t rgbBytesPerPixel = 4; + size_t rgbBytesPerRow = width * rgbBytesPerPixel; + + uchar_t *nv21Data = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0); + uchar_t *rgbData = malloc(rgbBytesPerRow * height); + + 
convertNV21DataToRGBData((int)width, (int)height, nv21Data, rgbData, (int)rgbBytesPerPixel, (int)rgbBytesPerRow); + + CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); + CGContextRef context = + CGBitmapContextCreate(rgbData, width, height, 8, rgbBytesPerRow, colorSpace, kCGImageAlphaNoneSkipLast); + CGImageRef cgImage = CGBitmapContextCreateImage(context); + + UIImage *result = [UIImage imageWithCGImage:cgImage]; + + CGImageRelease(cgImage); + CGContextRelease(context); + CGColorSpaceRelease(colorSpace); + free(rgbData); + + CVPixelBufferUnlockBaseAddress(imageBuffer, 0); + + return result; +} + +- (NSString *)methodName +{ + return @"VideoInput"; +} + +@end diff --git a/ManagedCapturer/SCTimedTask.h b/ManagedCapturer/SCTimedTask.h new file mode 100644 index 0000000..f5a4e15 --- /dev/null +++ b/ManagedCapturer/SCTimedTask.h @@ -0,0 +1,28 @@ +// +// SCTimedTask.h +// Snapchat +// +// Created by Michel Loenngren on 4/2/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import +#import + +/* + Block based timed task + */ +@interface SCTimedTask : NSObject + +@property (nonatomic, assign) CMTime targetTime; +@property (nonatomic, copy) void (^task)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelayInSecond); + +- (instancetype)init NS_UNAVAILABLE; + +- (instancetype)initWithTargetTime:(CMTime)targetTime + task:(void (^)(CMTime relativePresentationTime, + CGFloat sessionStartTimeDelayInSecond))task; + +- (NSString *)description; + +@end diff --git a/ManagedCapturer/SCTimedTask.m b/ManagedCapturer/SCTimedTask.m new file mode 100644 index 0000000..babf445 --- /dev/null +++ b/ManagedCapturer/SCTimedTask.m @@ -0,0 +1,32 @@ +// +// SCTimedTask.m +// Snapchat +// +// Created by Michel Loenngren on 4/2/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. 
+// + +#import "SCTimedTask.h" + +#import + +@implementation SCTimedTask + +- (instancetype)initWithTargetTime:(CMTime)targetTime + task: + (void (^)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelayInSecond))task +{ + if (self = [super init]) { + _targetTime = targetTime; + _task = task; + } + return self; +} + +- (NSString *)description +{ + return [NSString + sc_stringWithFormat:@"<%@: %p, targetTime: %lld>", NSStringFromClass([self class]), self, _targetTime.value]; +} + +@end diff --git a/ManagedCapturer/SCVideoCaptureSessionInfo.h b/ManagedCapturer/SCVideoCaptureSessionInfo.h new file mode 100644 index 0000000..b89da3e --- /dev/null +++ b/ManagedCapturer/SCVideoCaptureSessionInfo.h @@ -0,0 +1,83 @@ +// +// SCVideoCaptureSessionInfo.h +// Snapchat +// +// Created by Michel Loenngren on 3/27/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import + +#import +#import + +typedef NS_ENUM(NSInteger, SCManagedVideoCapturerInfoType) { + SCManagedVideoCapturerInfoAudioQueueError, + SCManagedVideoCapturerInfoAssetWriterError, + SCManagedVideoCapturerInfoAudioSessionError, + SCManagedVideoCapturerInfoAudioQueueRetrySuccess, + SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_audioQueue, + SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_hardware +}; + +typedef u_int32_t sc_managed_capturer_recording_session_t; + +/* + Container object holding information about the + current recording session. 
+ */ +typedef struct { + CMTime startTime; + CMTime endTime; + CMTime duration; + sc_managed_capturer_recording_session_t sessionId; +} SCVideoCaptureSessionInfo; + +static inline SCVideoCaptureSessionInfo SCVideoCaptureSessionInfoMake(CMTime startTime, CMTime endTime, + sc_managed_capturer_recording_session_t sessionId) +{ + SCVideoCaptureSessionInfo session; + session.startTime = startTime; + session.endTime = endTime; + if (CMTIME_IS_VALID(startTime) && CMTIME_IS_VALID(endTime)) { + session.duration = CMTimeSubtract(endTime, startTime); + } else { + session.duration = kCMTimeInvalid; + } + session.sessionId = sessionId; + return session; +} + +static inline NSTimeInterval SCVideoCaptureSessionInfoGetCurrentDuration(SCVideoCaptureSessionInfo sessionInfo) +{ + if (CMTIME_IS_VALID(sessionInfo.startTime)) { + if (CMTIME_IS_VALID(sessionInfo.endTime)) { + return CMTimeGetSeconds(sessionInfo.duration); + } + return CACurrentMediaTime() - CMTimeGetSeconds(sessionInfo.startTime); + } + return 0; +} + +static inline NSString *SCVideoCaptureSessionInfoGetDebugString(CMTime time, NSString *label) +{ + if (CMTIME_IS_VALID(time)) { + return [NSString sc_stringWithFormat:@"%@: %f", label, CMTimeGetSeconds(time)]; + } else { + return [NSString sc_stringWithFormat:@"%@: Invalid", label]; + } +} + +static inline NSString *SCVideoCaptureSessionInfoGetDebugDescription(SCVideoCaptureSessionInfo sessionInfo) +{ + NSMutableString *description = [NSMutableString new]; + [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.startTime, @"StartTime")]; + [description appendString:@", "]; + [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.endTime, @"EndTime")]; + [description appendString:@", "]; + [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.duration, @"Duration")]; + [description appendString:@", "]; + [description appendString:[NSString sc_stringWithFormat:@"Id: %u", sessionInfo.sessionId]]; + + 
return [description copy];
+}
diff --git a/ManagedCapturer/StateMachine/SCCaptureBaseState.h b/ManagedCapturer/StateMachine/SCCaptureBaseState.h
new file mode 100644
index 0000000..ef18f00
--- /dev/null
+++ b/ManagedCapturer/StateMachine/SCCaptureBaseState.h
@@ -0,0 +1,103 @@
+//
+// SCCaptureBaseState.h
+// Snapchat
+//
+// Created by Lin Jia on 10/19/17.
+//
+//
+
+#import "SCCaptureCommon.h"
+#import "SCCaptureStateDelegate.h"
+#import "SCCaptureStateMachineBookKeeper.h"
+#import "SCCaptureStateUtil.h"
+#import "SCCaptureWorker.h"
+#import "SCManagedCaptureDevice.h"
+#import "SCManagedCapturerState.h"
+#import "SCStateTransitionPayload.h"
+
+#import <Foundation/Foundation.h>
+
+@class SCCaptureResource;
+
+@class SCCapturerToken;
+
+@class SCAudioConfiguration;
+
+@class SCQueuePerformer;
+/*
+ Every state machine state needs to inherit from SCCaptureBaseState to have the APIs. A state machine state in general
+ will only implement APIs which are legal for itself. If illegal APIs are invoked, SCCaptureBaseState will handle it.
+ The intended behavior:
+ 1) crash using SCAssert in Debug build,
+ 2) ignore the API call, and log the call, for alpha/master/production.
+ 3) in the future, we will introduce a dangerous API call concept, and restart the camera in such a case, to avoid bad state.
+
+ Every state machine state is going to be built to follow functional programming as much as possible. The shared
+ resources between them will be passed into the API via SCCaptureResource.
+ */
+
+@interface SCCaptureBaseState : NSObject
+
+- (instancetype)init NS_UNAVAILABLE;
+
+- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
+                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
+                         delegate:(id<SCCaptureStateDelegate>)delegate;
+
+/* The following API will be invoked at the moment the state context promotes the state to be the current state. The
+ * state uses this chance to do something, such as start recording for the recording state.
+ */ +- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload + resource:(SCCaptureResource *)resource + context:(NSString *)context; + +- (SCCaptureStateMachineStateId)stateId; + +- (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition + resource:(SCCaptureResource *)resource + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +- (void)startRunningWithCapturerToken:(SCCapturerToken *)token + resource:(SCCaptureResource *)resource + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token + resource:(SCCaptureResource *)resource + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context; + +- (void)prepareForRecordingWithResource:(SCCaptureResource *)resource + audioConfiguration:(SCAudioConfiguration *)configuration + context:(NSString *)context; + +- (void)startRecordingWithResource:(SCCaptureResource *)resource + audioConfiguration:(SCAudioConfiguration *)configuration + outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings + maxDuration:(NSTimeInterval)maxDuration + fileURL:(NSURL *)fileURL + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler + context:(NSString *)context; + +- (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context; + +- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context; + +- (void)captureStillImageWithResource:(SCCaptureResource *)resource + aspectRatio:(CGFloat)aspectRatio + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler + context:(NSString *)context; + +- (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration + 
resource:(SCCaptureResource *)resource + context:(NSString *)context; + +- (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler + resource:(SCCaptureResource *)resource + context:(NSString *)context; + +@property (nonatomic, strong, readonly) SCCaptureStateMachineBookKeeper *bookKeeper; +@end diff --git a/ManagedCapturer/StateMachine/SCCaptureBaseState.m b/ManagedCapturer/StateMachine/SCCaptureBaseState.m new file mode 100644 index 0000000..569ab54 --- /dev/null +++ b/ManagedCapturer/StateMachine/SCCaptureBaseState.m @@ -0,0 +1,169 @@ +// +// SCCaptureBaseState.m +// Snapchat +// +// Created by Lin Jia on 10/19/17. +// +// + +#import "SCCaptureBaseState.h" + +#import "SCCaptureStateMachineBookKeeper.h" +#import "SCCapturerToken.h" +#import "SCManagedCapturerV1_Private.h" + +#import +#import +#import + +@implementation SCCaptureBaseState { + SCCaptureStateMachineBookKeeper *_bookKeeper; + SCQueuePerformer *_performer; + __weak id _delegate; +} + +- (instancetype)initWithPerformer:(SCQueuePerformer *)performer + bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper + delegate:(id)delegate +{ + self = [super init]; + if (self) { + SCAssert(performer, @""); + SCAssert(bookKeeper, @""); + _bookKeeper = bookKeeper; + _performer = performer; + _delegate = delegate; + } + return self; +} + +- (SCCaptureStateMachineStateId)stateId +{ + return SCCaptureBaseStateId; +} + +- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload + resource:(SCCaptureResource *)resource + context:(NSString *)context +{ + [self _handleBaseStateBehavior:@"didBecomeCurrentState" context:context]; +} + +- (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition + resource:(SCCaptureResource *)resource + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + [self _handleBaseStateBehavior:@"initializeCaptureWithDevicePosition" context:context]; +} + +- (void)startRunningWithCapturerToken:(SCCapturerToken 
*)token
+                             resource:(SCCaptureResource *)resource
+                    completionHandler:(dispatch_block_t)completionHandler
+                              context:(NSString *)context
+{
+    [self _handleBaseStateBehavior:@"startRunningWithCapturerToken" context:context];
+}
+
+- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
+                            resource:(SCCaptureResource *)resource
+                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
+                             context:(NSString *)context
+{
+    SCAssertPerformer(_performer);
+    BOOL actuallyStopped = [[SCManagedCapturerV1 sharedInstance] stopRunningWithCaptureToken:token
+                                                                           completionHandler:completionHandler
+                                                                                     context:context];
+    // TODO: Fix CCAM-14450
+    // This is a temporary solution for https://jira.sc-corp.net/browse/CCAM-14450
+    // It is caused by switching from the scanning state to the stop running state when the view is disappearing in the
+    // scanning state, which can be reproduced by triggering scanning and then switching to the maps page.
+    // We remove SCAssert to ignore the crashes in the master branch and will find a solution for the illegal call for
+    // the state machine later
+
+    if (self.stateId != SCCaptureScanningStateId) {
+        SCAssert(!actuallyStopped, @"actuallyStopped in state: %@ with context: %@", SCCaptureStateName([self stateId]),
+                 context);
+    } else {
+        SCLogCaptureStateMachineInfo(@"actuallyStopped:%d in state: %@ with context: %@", actuallyStopped,
+                                     SCCaptureStateName([self stateId]), context);
+    }
+
+    if (actuallyStopped) {
+        [_delegate currentState:self
+            requestToTransferToNewState:SCCaptureInitializedStateId
+                                payload:nil
+                                context:context];
+    }
+}
+
+- (void)prepareForRecordingWithResource:(SCCaptureResource *)resource
+                     audioConfiguration:(SCAudioConfiguration *)configuration
+                                context:(NSString *)context
+{
+    [self _handleBaseStateBehavior:@"prepareForRecordingWithResource" context:context];
+}
+
+- (void)startRecordingWithResource:(SCCaptureResource *)resource
+                audioConfiguration:(SCAudioConfiguration *)configuration
+                    
outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings + maxDuration:(NSTimeInterval)maxDuration + fileURL:(NSURL *)fileURL + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler + context:(NSString *)context +{ + [self _handleBaseStateBehavior:@"startRecordingWithResource" context:context]; +} + +- (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context +{ + [self _handleBaseStateBehavior:@"stopRecordingWithResource" context:context]; +} + +- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context +{ + [self _handleBaseStateBehavior:@"cancelRecordingWithResource" context:context]; +} + +- (void)captureStillImageWithResource:(SCCaptureResource *)resource + aspectRatio:(CGFloat)aspectRatio + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler + context:(NSString *)context +{ + [self _handleBaseStateBehavior:@"captureStillImageWithResource" context:context]; +} + +- (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration + resource:(SCCaptureResource *)resource + context:(NSString *)context +{ + [self _handleBaseStateBehavior:@"startScanWithScanConfiguration" context:context]; +} + +- (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler + resource:(SCCaptureResource *)resource + context:(NSString *)context +{ + // Temporary solution until IDT-12520 is resolved. 
+ [SCCaptureWorker stopScanWithCompletionHandler:completionHandler resource:resource]; + //[self _handleBaseStateBehavior:@"stopScanWithCompletionHandler"]; +} + +- (void)_handleBaseStateBehavior:(NSString *)illegalAPIName context:(NSString *)context +{ + [_bookKeeper state:[self stateId] + illegalAPIcalled:illegalAPIName + callStack:[NSThread callStackSymbols] + context:context]; + if (SCIsDebugBuild()) { + SCAssertFail(@"illegal API invoked on capture state machine"); + } +} + +- (SCCaptureStateMachineBookKeeper *)bookKeeper +{ + return _bookKeeper; +} +@end diff --git a/ManagedCapturer/StateMachine/SCCaptureStateDelegate.h b/ManagedCapturer/StateMachine/SCCaptureStateDelegate.h new file mode 100644 index 0000000..f07766f --- /dev/null +++ b/ManagedCapturer/StateMachine/SCCaptureStateDelegate.h @@ -0,0 +1,30 @@ +// +// SCCaptureStateDelegate.h +// Snapchat +// +// Created by Lin Jia on 10/27/17. +// +// + +#import "SCCaptureStateUtil.h" + +#import + +@class SCCaptureBaseState; +@class SCStateTransitionPayload; +/* + The state machine state delegate is used by state machine states to hint to the system that "I am done, now transfer + to other state". + + Currently, SCCaptureStateMachineContext is the central piece that glues all states together, and it is the delegate for + those states. + */ + +@protocol SCCaptureStateDelegate + +- (void)currentState:(SCCaptureBaseState *)state + requestToTransferToNewState:(SCCaptureStateMachineStateId)newState + payload:(SCStateTransitionPayload *)payload + context:(NSString *)context; + +@end diff --git a/ManagedCapturer/StateMachine/SCCaptureStateMachineBookKeeper.h b/ManagedCapturer/StateMachine/SCCaptureStateMachineBookKeeper.h new file mode 100644 index 0000000..24ea585 --- /dev/null +++ b/ManagedCapturer/StateMachine/SCCaptureStateMachineBookKeeper.h @@ -0,0 +1,29 @@ +// +// SCCaptureStateTransitionBookKeeper.h +// Snapchat +// +// Created by Lin Jia on 10/27/17. 
+// +// + +#import "SCCaptureStateUtil.h" + +#import + +/* + Book keeper is used to record every state transition, and every illegal API call. + */ + +@interface SCCaptureStateMachineBookKeeper : NSObject + +- (void)stateTransitionFrom:(SCCaptureStateMachineStateId)fromId + to:(SCCaptureStateMachineStateId)toId + context:(NSString *)context; + +- (void)state:(SCCaptureStateMachineStateId)captureState + illegalAPIcalled:(NSString *)illegalAPIName + callStack:(NSArray *)callStack + context:(NSString *)context; + +- (void)logAPICalled:(NSString *)apiName context:(NSString *)context; +@end diff --git a/ManagedCapturer/StateMachine/SCCaptureStateMachineBookKeeper.m b/ManagedCapturer/StateMachine/SCCaptureStateMachineBookKeeper.m new file mode 100644 index 0000000..7d9c466 --- /dev/null +++ b/ManagedCapturer/StateMachine/SCCaptureStateMachineBookKeeper.m @@ -0,0 +1,63 @@ +// +// SCCaptureStateTransitionBookKeeper.m +// Snapchat +// +// Created by Lin Jia on 10/27/17. +// +// + +#import "SCCaptureStateMachineBookKeeper.h" + +#import "SCCaptureStateUtil.h" +#import "SCLogger+Camera.h" + +#import +#import + +@interface SCCaptureStateMachineBookKeeper () { + NSDate *_lastStateStartTime; +} +@end + +@implementation SCCaptureStateMachineBookKeeper + +- (void)stateTransitionFrom:(SCCaptureStateMachineStateId)fromId + to:(SCCaptureStateMachineStateId)toId + context:(NSString *)context +{ + NSDate *date = [NSDate date]; + SCLogCaptureStateMachineInfo(@"State %@ life span: %f seconds, transition to: %@, in context:%@, at: %@ \n", + SCCaptureStateName(fromId), [date timeIntervalSinceDate:_lastStateStartTime], + SCCaptureStateName(toId), context, date); + _lastStateStartTime = date; +} + +- (void)state:(SCCaptureStateMachineStateId)captureState + illegalAPIcalled:(NSString *)illegalAPIName + callStack:(NSArray *)callStack + context:(NSString *)context + +{ + SCAssert(callStack, @"call stack empty"); + SCAssert(illegalAPIName, @""); + SCAssert(context, @"Context is empty"); + 
SCLogCaptureStateMachineError(@"State: %@, illegal API invoke: %@, at: %@, callstack: %@ \n", + SCCaptureStateName(captureState), illegalAPIName, [NSDate date], callStack); + NSArray *reportedArray = + [callStack count] > 15 ? [callStack subarrayWithRange:NSMakeRange(0, 15)] : callStack; + [[SCLogger sharedInstance] logEvent:kSCCameraStateMachineIllegalAPICall + parameters:@{ + @"state" : SCCaptureStateName(captureState), + @"API" : illegalAPIName, + @"call_stack" : reportedArray, + @"context" : context + }]; +} + +- (void)logAPICalled:(NSString *)apiName context:(NSString *)context +{ + SCAssert(apiName, @"API name is empty"); + SCAssert(context, @"Context is empty"); + SCLogCaptureStateMachineInfo(@"api: %@ context: %@", apiName, context); +} +@end diff --git a/ManagedCapturer/StateMachine/SCCaptureStateMachineContext.h b/ManagedCapturer/StateMachine/SCCaptureStateMachineContext.h new file mode 100644 index 0000000..1e98943 --- /dev/null +++ b/ManagedCapturer/StateMachine/SCCaptureStateMachineContext.h @@ -0,0 +1,76 @@ +// +// SCCaptureStateMachineContext.h +// Snapchat +// +// Created by Lin Jia on 10/18/17. +// +// + +#import "SCCaptureCommon.h" +#import "SCManagedCaptureDevice.h" + +#import + +#import + +/* + SCCaptureStateMachineContext is the central piece that glues all states together. + + It will pass API calls to the current state. + + The classic state machine design pattern: + https://en.wikipedia.org/wiki/State_pattern + + It is also the delegate for the states it manages, so that those states can tell stateMachineContext to transit to next + state. 
+ */ + +@class SCCaptureResource; + +@class SCCapturerToken; + +@interface SCCaptureStateMachineContext : NSObject + +- (instancetype)initWithResource:(SCCaptureResource *)resource; + +- (void)initializeCaptureWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +- (SCCapturerToken *)startRunningWithContext:(NSString *)context completionHandler:(dispatch_block_t)completionHandler; + +- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context; + +- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token + after:(NSTimeInterval)delay + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context; + +- (void)prepareForRecordingAsynchronouslyWithAudioConfiguration:(SCAudioConfiguration *)configuration + context:(NSString *)context; + +- (void)startRecordingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings + audioConfiguration:(SCAudioConfiguration *)configuration + maxDuration:(NSTimeInterval)maxDuration + fileURL:(NSURL *)fileURL + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler + context:(NSString *)context; + +- (void)stopRecordingWithContext:(NSString *)context; + +- (void)cancelRecordingWithContext:(NSString *)context; + +- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio + captureSessionID:(NSString *)captureSessionID + completionHandler: + (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler + context:(NSString *)context; + +#pragma mark - Scanning +- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context; +- 
(void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; + +@end diff --git a/ManagedCapturer/StateMachine/SCCaptureStateMachineContext.m b/ManagedCapturer/StateMachine/SCCaptureStateMachineContext.m new file mode 100644 index 0000000..5fd1b7a --- /dev/null +++ b/ManagedCapturer/StateMachine/SCCaptureStateMachineContext.m @@ -0,0 +1,301 @@ +// +// SCCaptureStateMachineContext.m +// Snapchat +// +// Created by Lin Jia on 10/18/17. +// +// + +#import "SCCaptureStateMachineContext.h" + +#import "SCCaptureBaseState.h" +#import "SCCaptureImageState.h" +#import "SCCaptureImageWhileRecordingState.h" +#import "SCCaptureInitializedState.h" +#import "SCCaptureRecordingState.h" +#import "SCCaptureResource.h" +#import "SCCaptureRunningState.h" +#import "SCCaptureScanningState.h" +#import "SCCaptureStateMachineBookKeeper.h" +#import "SCCaptureStateUtil.h" +#import "SCCaptureUninitializedState.h" +#import "SCCaptureWorker.h" +#import "SCCapturerToken.h" +#import "SCStateTransitionPayload.h" + +#import +#import +#import +#import +#import +#import + +@interface SCCaptureStateMachineContext () { + SCQueuePerformer *_queuePerformer; + + // Cache all the states. 
+ NSMutableDictionary *_states; + SCCaptureBaseState *_currentState; + SCCaptureStateMachineBookKeeper *_bookKeeper; + SCCaptureResource *_captureResource; +} +@end + +@implementation SCCaptureStateMachineContext + +- (instancetype)initWithResource:(SCCaptureResource *)resource +{ + self = [super init]; + if (self) { + SCAssert(resource, @""); + SCAssert(resource.queuePerformer, @""); + _captureResource = resource; + _queuePerformer = resource.queuePerformer; + _states = [[NSMutableDictionary alloc] init]; + _bookKeeper = [[SCCaptureStateMachineBookKeeper alloc] init]; + [self _setCurrentState:SCCaptureUninitializedStateId payload:nil context:SCCapturerContext]; + } + return self; +} + +- (void)_setCurrentState:(SCCaptureStateMachineStateId)stateId + payload:(SCStateTransitionPayload *)payload + context:(NSString *)context +{ + switch (stateId) { + case SCCaptureUninitializedStateId: + if (![_states objectForKey:@(stateId)]) { + SCCaptureUninitializedState *uninitializedState = + [[SCCaptureUninitializedState alloc] initWithPerformer:_queuePerformer + bookKeeper:_bookKeeper + delegate:self]; + [_states setObject:uninitializedState forKey:@(stateId)]; + } + _currentState = [_states objectForKey:@(stateId)]; + break; + case SCCaptureInitializedStateId: + if (![_states objectForKey:@(stateId)]) { + SCCaptureInitializedState *initializedState = + [[SCCaptureInitializedState alloc] initWithPerformer:_queuePerformer + bookKeeper:_bookKeeper + delegate:self]; + [_states setObject:initializedState forKey:@(stateId)]; + } + _currentState = [_states objectForKey:@(stateId)]; + break; + case SCCaptureRunningStateId: + if (![_states objectForKey:@(stateId)]) { + SCCaptureRunningState *runningState = + [[SCCaptureRunningState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self]; + [_states setObject:runningState forKey:@(stateId)]; + } + _currentState = [_states objectForKey:@(stateId)]; + break; + case SCCaptureImageStateId: + if (![_states 
objectForKey:@(stateId)]) { + SCCaptureImageState *captureImageState = + [[SCCaptureImageState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self]; + [_states setObject:captureImageState forKey:@(stateId)]; + } + _currentState = [_states objectForKey:@(stateId)]; + break; + case SCCaptureImageWhileRecordingStateId: + if (![_states objectForKey:@(stateId)]) { + SCCaptureImageWhileRecordingState *captureImageWhileRecordingState = + [[SCCaptureImageWhileRecordingState alloc] initWithPerformer:_queuePerformer + bookKeeper:_bookKeeper + delegate:self]; + [_states setObject:captureImageWhileRecordingState forKey:@(stateId)]; + } + _currentState = [_states objectForKey:@(stateId)]; + break; + case SCCaptureScanningStateId: + if (![_states objectForKey:@(stateId)]) { + SCCaptureScanningState *scanningState = + [[SCCaptureScanningState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self]; + [_states setObject:scanningState forKey:@(stateId)]; + } + _currentState = [_states objectForKey:@(stateId)]; + break; + case SCCaptureRecordingStateId: + if (![_states objectForKey:@(stateId)]) { + SCCaptureRecordingState *recordingState = [[SCCaptureRecordingState alloc] initWithPerformer:_queuePerformer + bookKeeper:_bookKeeper + delegate:self]; + [_states setObject:recordingState forKey:@(stateId)]; + } + _currentState = [_states objectForKey:@(stateId)]; + break; + default: + SCAssert(NO, @"illigal state Id"); + break; + } + [_currentState didBecomeCurrentState:payload resource:_captureResource context:context]; +} + +- (void)initializeCaptureWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + [SCCaptureWorker setupCapturePreviewLayerController]; + + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_queuePerformer perform:^{ + SCTraceResume(resumeToken); + [_currentState 
initializeCaptureWithDevicePosition:devicePosition + resource:_captureResource + completionHandler:completionHandler + context:context]; + }]; +} + +- (SCCapturerToken *)startRunningWithContext:(NSString *)context completionHandler:(dispatch_block_t)completionHandler +{ + [[SCLogger sharedInstance] updateLogTimedEventStart:kSCCameraMetricsOpen uniqueId:@""]; + + SCCapturerToken *token = [[SCCapturerToken alloc] initWithIdentifier:context]; + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_queuePerformer perform:^{ + SCTraceResume(resumeToken); + [_currentState startRunningWithCapturerToken:token + resource:_captureResource + completionHandler:completionHandler + context:context]; + }]; + + return token; +} + +- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_queuePerformer perform:^{ + SCTraceResume(resumeToken); + [_currentState stopRunningWithCapturerToken:token + resource:_captureResource + completionHandler:completionHandler + context:context]; + }]; +} + +- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token + after:(NSTimeInterval)delay + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_queuePerformer perform:^{ + SCTraceResume(resumeToken); + [_currentState stopRunningWithCapturerToken:token + resource:_captureResource + completionHandler:completionHandler + context:context]; + } + after:delay]; +} + +- (void)prepareForRecordingAsynchronouslyWithAudioConfiguration:(SCAudioConfiguration *)configuration + context:(NSString *)context +{ + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_queuePerformer perform:^{ + SCTraceResume(resumeToken); + [_currentState prepareForRecordingWithResource:_captureResource + 
audioConfiguration:configuration + context:context]; + }]; +} + +- (void)startRecordingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings + audioConfiguration:(SCAudioConfiguration *)configuration + maxDuration:(NSTimeInterval)maxDuration + fileURL:(NSURL *)fileURL + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_queuePerformer perform:^{ + SCTraceResume(resumeToken); + [_currentState startRecordingWithResource:_captureResource + audioConfiguration:configuration + outputSettings:outputSettings + maxDuration:maxDuration + fileURL:fileURL + captureSessionID:captureSessionID + completionHandler:completionHandler + context:context]; + }]; +} + +- (void)stopRecordingWithContext:(NSString *)context +{ + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_queuePerformer perform:^{ + SCTraceResume(resumeToken); + [_currentState stopRecordingWithResource:_captureResource context:context]; + }]; +} + +- (void)cancelRecordingWithContext:(NSString *)context +{ + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_queuePerformer perform:^{ + SCTraceResume(resumeToken); + [_currentState cancelRecordingWithResource:_captureResource context:context]; + }]; +} + +- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio + captureSessionID:(NSString *)captureSessionID + completionHandler: + (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler + context:(NSString *)context +{ + [_queuePerformer perform:^() { + [_currentState captureStillImageWithResource:_captureResource + aspectRatio:aspectRatio + captureSessionID:captureSessionID + completionHandler:completionHandler + context:context]; + }]; +} + +- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context +{ + 
[_queuePerformer perform:^() { + [_currentState startScanWithScanConfiguration:configuration resource:_captureResource context:context]; + }]; +} + +- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context +{ + [_queuePerformer perform:^() { + [_currentState stopScanWithCompletionHandler:completionHandler resource:_captureResource context:context]; + }]; +} + +- (void)currentState:(SCCaptureBaseState *)state + requestToTransferToNewState:(SCCaptureStateMachineStateId)newState + payload:(SCStateTransitionPayload *)payload + context:(NSString *)context +{ + SCAssertPerformer(_queuePerformer); + SCAssert(_currentState == state, @"state: %@ newState: %@ context:%@", SCCaptureStateName([state stateId]), + SCCaptureStateName(newState), context); + if (payload) { + SCAssert(payload.fromState == [state stateId], @"From state id check"); + SCAssert(payload.toState == newState, @"To state id check"); + } + + if (_currentState != state) { + return; + } + + [_bookKeeper stateTransitionFrom:[state stateId] to:newState context:context]; + [self _setCurrentState:newState payload:payload context:context]; +} + +@end diff --git a/ManagedCapturer/StateMachine/SCCaptureStateUtil.h b/ManagedCapturer/StateMachine/SCCaptureStateUtil.h new file mode 100644 index 0000000..1b8ca4a --- /dev/null +++ b/ManagedCapturer/StateMachine/SCCaptureStateUtil.h @@ -0,0 +1,37 @@ +// +// SCCaptureStateUtil.h +// Snapchat +// +// Created by Lin Jia on 10/27/17. +// +// + +#import "SCLogger+Camera.h" + +#import +#import + +#import + +#define SCLogCaptureStateMachineInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCCaptureStateMachine] " fmt, ##__VA_ARGS__) +#define SCLogCaptureStateMachineError(fmt, ...) 
SCLogCoreCameraError(@"[SCCaptureStateMachine] " fmt, ##__VA_ARGS__) + +typedef NSNumber SCCaptureStateKey; + +typedef NS_ENUM(NSUInteger, SCCaptureStateMachineStateId) { + SCCaptureBaseStateId = 0, + SCCaptureUninitializedStateId, + SCCaptureInitializedStateId, + SCCaptureImageStateId, + SCCaptureImageWhileRecordingStateId, + SCCaptureRunningStateId, + SCCaptureRecordingStateId, + SCCaptureScanningStateId, + SCCaptureStateMachineStateIdCount +}; + +SC_EXTERN_C_BEGIN + +NSString *SCCaptureStateName(SCCaptureStateMachineStateId stateId); + +SC_EXTERN_C_END diff --git a/ManagedCapturer/StateMachine/SCCaptureStateUtil.m b/ManagedCapturer/StateMachine/SCCaptureStateUtil.m new file mode 100644 index 0000000..deb20a7 --- /dev/null +++ b/ManagedCapturer/StateMachine/SCCaptureStateUtil.m @@ -0,0 +1,38 @@ +// +// SCCaptureStateUtil.m +// Snapchat +// +// Created by Lin Jia on 10/27/17. +// +// + +#import "SCCaptureStateUtil.h" + +#import +#import + +NSString *SCCaptureStateName(SCCaptureStateMachineStateId stateId) +{ + switch (stateId) { + case SCCaptureBaseStateId: + return @"SCCaptureBaseStateId"; + case SCCaptureUninitializedStateId: + return @"SCCaptureUninitializedStateId"; + case SCCaptureInitializedStateId: + return @"SCCaptureInitializedStateId"; + case SCCaptureImageStateId: + return @"SCCaptureImageStateId"; + case SCCaptureImageWhileRecordingStateId: + return @"SCCaptureImageWhileRecordingStateId"; + case SCCaptureRunningStateId: + return @"SCCaptureRunningStateId"; + case SCCaptureRecordingStateId: + return @"SCCaptureRecordingStateId"; + case SCCaptureScanningStateId: + return @"SCCaptureScanningStateId"; + default: + SCCAssert(NO, @"illegal state id"); + break; + } + return @"SCIllegalStateId"; +} diff --git a/ManagedCapturer/StateMachine/SCManagedCapturerLogging.h b/ManagedCapturer/StateMachine/SCManagedCapturerLogging.h new file mode 100644 index 0000000..069b438 --- /dev/null +++ b/ManagedCapturer/StateMachine/SCManagedCapturerLogging.h @@ -0,0 +1,12 @@ 
+// +// SCManagedCapturerLogging.h +// Snapchat +// +// Created by Lin Jia on 11/13/17. +// + +#import + +#define SCLogCapturerInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__) +#define SCLogCapturerWarning(fmt, ...) SCLogCoreCameraWarning(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__) +#define SCLogCapturerError(fmt, ...) SCLogCoreCameraError(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__) diff --git a/ManagedCapturer/StateMachine/States/SCCaptureImageState.h b/ManagedCapturer/StateMachine/States/SCCaptureImageState.h new file mode 100644 index 0000000..561b43f --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureImageState.h @@ -0,0 +1,22 @@ +// +// SCCaptureImageState.h +// Snapchat +// +// Created by Lin Jia on 1/8/18. +// + +#import "SCCaptureBaseState.h" + +#import + +@class SCQueuePerformer; + +@interface SCCaptureImageState : SCCaptureBaseState + +SC_INIT_AND_NEW_UNAVAILABLE + +- (instancetype)initWithPerformer:(SCQueuePerformer *)performer + bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper + delegate:(id)delegate; + +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureImageState.m b/ManagedCapturer/StateMachine/States/SCCaptureImageState.m new file mode 100644 index 0000000..d26a0f4 --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureImageState.m @@ -0,0 +1,65 @@ +// +// SCCaptureImageState.m +// Snapchat +// +// Created by Lin Jia on 1/8/18. 
+// + +#import "SCCaptureImageState.h" + +#import "SCCaptureImageStateTransitionPayload.h" +#import "SCManagedCapturerV1_Private.h" +#import "SCStateTransitionPayload.h" + +#import +#import + +@interface SCCaptureImageState () { + __weak id _delegate; + SCQueuePerformer *_performer; +} +@end + +@implementation SCCaptureImageState + +- (instancetype)initWithPerformer:(SCQueuePerformer *)performer + bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper + delegate:(id)delegate +{ + self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate]; + if (self) { + _delegate = delegate; + _performer = performer; + } + return self; +} + +- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload + resource:(SCCaptureResource *)resource + context:(NSString *)context +{ + SCAssertPerformer(_performer); + SCAssert(payload.toState == [self stateId], @""); + if (![payload isKindOfClass:[SCCaptureImageStateTransitionPayload class]]) { + SCAssertFail(@"wrong payload pass in"); + [_delegate currentState:self requestToTransferToNewState:payload.fromState payload:nil context:context]; + return; + } + SCCaptureImageStateTransitionPayload *captureImagePayload = (SCCaptureImageStateTransitionPayload *)payload; + + [SCCaptureWorker + captureStillImageWithCaptureResource:resource + aspectRatio:captureImagePayload.aspectRatio + captureSessionID:captureImagePayload.captureSessionID + shouldCaptureFromVideo:[SCCaptureWorker shouldCaptureImageFromVideoWithResource:resource] + completionHandler:captureImagePayload.block + context:context]; + + [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context]; +} + +- (SCCaptureStateMachineStateId)stateId +{ + return SCCaptureImageStateId; +} +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureImageStateTransitionPayload.h b/ManagedCapturer/StateMachine/States/SCCaptureImageStateTransitionPayload.h new file mode 100644 index 0000000..ea82816 --- /dev/null +++ 
b/ManagedCapturer/StateMachine/States/SCCaptureImageStateTransitionPayload.h @@ -0,0 +1,29 @@ +// +// SCCaptureImageStateTransitionPayload.h +// Snapchat +// +// Created by Lin Jia on 1/9/18. +// + +#import "SCCaptureCommon.h" +#import "SCStateTransitionPayload.h" + +#import + +@interface SCCaptureImageStateTransitionPayload : SCStateTransitionPayload + +@property (nonatomic, readonly, strong) NSString *captureSessionID; + +@property (nonatomic, readonly, copy) sc_managed_capturer_capture_still_image_completion_handler_t block; + +@property (nonatomic, readonly, assign) CGFloat aspectRatio; + +SC_INIT_AND_NEW_UNAVAILABLE + +- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState + toState:(SCCaptureStateMachineStateId)toState + captureSessionId:(NSString *)captureSessionID + aspectRatio:(CGFloat)aspectRatio + completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block; + +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureImageStateTransitionPayload.m b/ManagedCapturer/StateMachine/States/SCCaptureImageStateTransitionPayload.m new file mode 100644 index 0000000..45ba345 --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureImageStateTransitionPayload.m @@ -0,0 +1,27 @@ +// +// SCCaptureImageStateTransitionPayload.m +// Snapchat +// +// Created by Lin Jia on 1/9/18. 
+// + +#import "SCCaptureImageStateTransitionPayload.h" + +@implementation SCCaptureImageStateTransitionPayload + +- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState + toState:(SCCaptureStateMachineStateId)toState + captureSessionId:(NSString *)captureSessionID + aspectRatio:(CGFloat)aspectRatio + completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block +{ + self = [super initWithFromState:fromState toState:toState]; + if (self) { + _captureSessionID = captureSessionID; + _aspectRatio = aspectRatio; + _block = block; + } + return self; +} + +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingState.h b/ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingState.h new file mode 100644 index 0000000..281b0a4 --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingState.h @@ -0,0 +1,22 @@ +// +// SCCaptureImageWhileRecordingState.h +// Snapchat +// +// Created by Sun Lei on 22/02/2018. +// + +#import "SCCaptureBaseState.h" + +#import + +@class SCQueuePerformer; + +@interface SCCaptureImageWhileRecordingState : SCCaptureBaseState + +SC_INIT_AND_NEW_UNAVAILABLE + +- (instancetype)initWithPerformer:(SCQueuePerformer *)performer + bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper + delegate:(id)delegate; + +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingState.m b/ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingState.m new file mode 100644 index 0000000..eb1e4e1 --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingState.m @@ -0,0 +1,85 @@ +// +// SCCaptureImageWhileRecordingState.m +// Snapchat +// +// Created by Sun Lei on 22/02/2018. 
+// + +#import "SCCaptureImageWhileRecordingState.h" + +#import "SCCaptureImageWhileRecordingStateTransitionPayload.h" +#import "SCManagedCapturerV1_Private.h" + +#import +#import + +@interface SCCaptureImageWhileRecordingState () { + __weak id _delegate; + SCQueuePerformer *_performer; +} +@end + +@implementation SCCaptureImageWhileRecordingState + +- (instancetype)initWithPerformer:(SCQueuePerformer *)performer + bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper + delegate:(id)delegate +{ + self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate]; + if (self) { + _delegate = delegate; + _performer = performer; + } + return self; +} + +- (SCCaptureStateMachineStateId)stateId +{ + return SCCaptureImageWhileRecordingStateId; +} + +- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload + resource:(SCCaptureResource *)resource + context:(NSString *)context +{ + SCAssertPerformer(_performer); + SCAssert(payload.fromState == SCCaptureRecordingStateId, @""); + SCAssert(payload.toState == [self stateId], @""); + SCAssert([payload isKindOfClass:[SCCaptureImageWhileRecordingStateTransitionPayload class]], @""); + ; + SCCaptureImageWhileRecordingStateTransitionPayload *captureImagePayload = + (SCCaptureImageWhileRecordingStateTransitionPayload *)payload; + + @weakify(self); + sc_managed_capturer_capture_still_image_completion_handler_t block = + ^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error, SCManagedCapturerState *state) { + captureImagePayload.block(fullScreenImage, metadata, error, state); + [_performer perform:^{ + @strongify(self); + [self _cancelRecordingWithContext:context resource:resource]; + }]; + }; + + [SCCaptureWorker + captureStillImageWithCaptureResource:resource + aspectRatio:captureImagePayload.aspectRatio + captureSessionID:captureImagePayload.captureSessionID + shouldCaptureFromVideo:[SCCaptureWorker shouldCaptureImageFromVideoWithResource:resource] + completionHandler:block + 
context:context]; + + [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context]; +} + +- (void)_cancelRecordingWithContext:(NSString *)context resource:(SCCaptureResource *)resource +{ + SCTraceODPCompatibleStart(2); + SCAssertPerformer(_performer); + + [SCCaptureWorker cancelRecordingWithCaptureResource:resource]; + + NSString *apiName = + [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; + [self.bookKeeper logAPICalled:apiName context:context]; +} +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingStateTransitionPayload.h b/ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingStateTransitionPayload.h new file mode 100644 index 0000000..7079a10 --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingStateTransitionPayload.h @@ -0,0 +1,29 @@ +// +// SCCaptureImageWhileRecordingStateTransitionPayload.h +// Snapchat +// +// Created by Sun Lei on 22/02/2018. 
+// + +#import "SCCaptureCommon.h" +#import "SCStateTransitionPayload.h" + +#import + +@interface SCCaptureImageWhileRecordingStateTransitionPayload : SCStateTransitionPayload + +@property (nonatomic, readonly, strong) NSString *captureSessionID; + +@property (nonatomic, readonly, copy) sc_managed_capturer_capture_still_image_completion_handler_t block; + +@property (nonatomic, readonly, assign) CGFloat aspectRatio; + +SC_INIT_AND_NEW_UNAVAILABLE + +- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState + toState:(SCCaptureStateMachineStateId)toState + captureSessionId:(NSString *)captureSessionID + aspectRatio:(CGFloat)aspectRatio + completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block; + +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingStateTransitionPayload.m b/ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingStateTransitionPayload.m new file mode 100644 index 0000000..ae4f271 --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingStateTransitionPayload.m @@ -0,0 +1,27 @@ +// +// SCCaptureImageWhileRecordingStateTransitionPayload.m +// Snapchat +// +// Created by Sun Lei on 22/02/2018. 
+// + +#import "SCCaptureImageWhileRecordingStateTransitionPayload.h" + +@implementation SCCaptureImageWhileRecordingStateTransitionPayload + +- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState + toState:(SCCaptureStateMachineStateId)toState + captureSessionId:(NSString *)captureSessionID + aspectRatio:(CGFloat)aspectRatio + completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block +{ + self = [super initWithFromState:fromState toState:toState]; + if (self) { + _captureSessionID = captureSessionID; + _aspectRatio = aspectRatio; + _block = block; + } + return self; +} + +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureInitializedState.h b/ManagedCapturer/StateMachine/States/SCCaptureInitializedState.h new file mode 100644 index 0000000..5d5876c --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureInitializedState.h @@ -0,0 +1,22 @@ +// +// SCCaptureInitializedState.h +// Snapchat +// +// Created by Jingtian Yang on 20/12/2017. +// + +#import "SCCaptureBaseState.h" + +#import + +@class SCQueuePerformer; + +@interface SCCaptureInitializedState : SCCaptureBaseState + +- (instancetype)init NS_UNAVAILABLE; + +- (instancetype)initWithPerformer:(SCQueuePerformer *)performer + bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper + delegate:(id)delegate; + +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureInitializedState.m b/ManagedCapturer/StateMachine/States/SCCaptureInitializedState.m new file mode 100644 index 0000000..7a687a6 --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureInitializedState.m @@ -0,0 +1,68 @@ +// +// SCCaptureInitializedState.m +// Snapchat +// +// Created by Jingtian Yang on 20/12/2017. 
+// + +#import "SCCaptureInitializedState.h" + +#import "SCCapturerToken.h" +#import "SCManagedCapturerLogging.h" +#import "SCManagedCapturerV1_Private.h" + +#import +#import + +@interface SCCaptureInitializedState () { + __weak id _delegate; + SCQueuePerformer *_performer; +} + +@end + +@implementation SCCaptureInitializedState + +- (instancetype)initWithPerformer:(SCQueuePerformer *)performer + bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper + delegate:(id)delegate +{ + self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate]; + if (self) { + _delegate = delegate; + _performer = performer; + } + return self; +} + +- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload + resource:(SCCaptureResource *)resource + context:(NSString *)context +{ + // No op. +} + +- (SCCaptureStateMachineStateId)stateId +{ + return SCCaptureInitializedStateId; +} + +- (void)startRunningWithCapturerToken:(SCCapturerToken *)token + resource:(SCCaptureResource *)resource + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCAssertPerformer(_performer); + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"startRunningAsynchronouslyWithCompletionHandler called. 
token: %@", token); + + [SCCaptureWorker startRunningWithCaptureResource:resource token:token completionHandler:completionHandler]; + + [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context]; + + NSString *apiName = + [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; + [self.bookKeeper logAPICalled:apiName context:context]; +} + +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureRecordingState.h b/ManagedCapturer/StateMachine/States/SCCaptureRecordingState.h new file mode 100644 index 0000000..a6bbbf0 --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureRecordingState.h @@ -0,0 +1,22 @@ +// +// SCCaptureRecordingState.h +// Snapchat +// +// Created by Jingtian Yang on 12/01/2018. +// + +#import "SCCaptureBaseState.h" + +#import + +@class SCQueuePerformer; + +@interface SCCaptureRecordingState : SCCaptureBaseState + +SC_INIT_AND_NEW_UNAVAILABLE + +- (instancetype)initWithPerformer:(SCQueuePerformer *)performer + bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper + delegate:(id)delegate; + +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureRecordingState.m b/ManagedCapturer/StateMachine/States/SCCaptureRecordingState.m new file mode 100644 index 0000000..fb7513c --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureRecordingState.m @@ -0,0 +1,114 @@ +// +// SCCaptureRecordingState.m +// Snapchat +// +// Created by Jingtian Yang on 12/01/2018. 
+// + +#import "SCCaptureRecordingState.h" + +#import "SCCaptureImageWhileRecordingStateTransitionPayload.h" +#import "SCCaptureRecordingStateTransitionPayload.h" +#import "SCManagedCapturerV1_Private.h" +#import "SCStateTransitionPayload.h" + +#import +#import + +@interface SCCaptureRecordingState () { + __weak id _delegate; + SCQueuePerformer *_performer; +} +@end + +@implementation SCCaptureRecordingState + +- (instancetype)initWithPerformer:(SCQueuePerformer *)performer + bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper + delegate:(id)delegate +{ + self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate]; + if (self) { + _delegate = delegate; + _performer = performer; + } + return self; +} + +- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload + resource:(SCCaptureResource *)resource + context:(NSString *)context +{ + SCAssertPerformer(resource.queuePerformer); + SCAssert(payload.toState == [self stateId], @""); + if (![payload isKindOfClass:[SCCaptureRecordingStateTransitionPayload class]]) { + SCAssertFail(@"wrong payload pass in"); + [_delegate currentState:self requestToTransferToNewState:payload.fromState payload:nil context:context]; + return; + } + + SCCaptureRecordingStateTransitionPayload *recordingPayload = (SCCaptureRecordingStateTransitionPayload *)payload; + [SCCaptureWorker startRecordingWithCaptureResource:resource + outputSettings:recordingPayload.outputSettings + audioConfiguration:recordingPayload.configuration + maxDuration:recordingPayload.maxDuration + fileURL:recordingPayload.fileURL + captureSessionID:recordingPayload.captureSessionID + completionHandler:recordingPayload.block]; +} + +- (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCAssertPerformer(_performer); + + [SCCaptureWorker stopRecordingWithCaptureResource:resource]; + [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId 
payload:nil context:context]; + + NSString *apiName = + [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; + [self.bookKeeper logAPICalled:apiName context:context]; +} + +- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCAssertPerformer(_performer); + + [SCCaptureWorker cancelRecordingWithCaptureResource:resource]; + [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context]; + + NSString *apiName = + [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; + [self.bookKeeper logAPICalled:apiName context:context]; +} + +- (SCCaptureStateMachineStateId)stateId +{ + return SCCaptureRecordingStateId; +} + +- (void)captureStillImageWithResource:(SCCaptureResource *)resource + aspectRatio:(CGFloat)aspectRatio + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCAssertPerformer(_performer); + SCCaptureImageWhileRecordingStateTransitionPayload *payload = [ + [SCCaptureImageWhileRecordingStateTransitionPayload alloc] initWithFromState:SCCaptureRecordingStateId + toState:SCCaptureImageWhileRecordingStateId + captureSessionId:captureSessionID + aspectRatio:aspectRatio + completionHandler:completionHandler]; + [_delegate currentState:self + requestToTransferToNewState:SCCaptureImageWhileRecordingStateId + payload:payload + context:context]; + + NSString *apiName = + [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; + [self.bookKeeper logAPICalled:apiName context:context]; +} + +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureRecordingStateTransitionPayload.h b/ManagedCapturer/StateMachine/States/SCCaptureRecordingStateTransitionPayload.h new file mode 
100644 index 0000000..4995daa --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureRecordingStateTransitionPayload.h @@ -0,0 +1,41 @@ +// +// SCCaptureRecordingStateTransitionPayload.h +// Snapchat +// +// Created by Jingtian Yang on 12/01/2018. +// + +#import "SCCaptureCommon.h" +#import "SCManagedVideoCapturerOutputSettings.h" +#import "SCStateTransitionPayload.h" + +#import + +#import + +@interface SCCaptureRecordingStateTransitionPayload : SCStateTransitionPayload + +@property (nonatomic, readonly, strong) SCManagedVideoCapturerOutputSettings *outputSettings; + +@property (nonatomic, readonly, strong) SCAudioConfiguration *configuration; + +@property (nonatomic, readonly, assign) NSTimeInterval maxDuration; + +@property (nonatomic, readonly, strong) NSURL *fileURL; + +@property (nonatomic, readonly, strong) NSString *captureSessionID; + +@property (nonatomic, readonly, copy) sc_managed_capturer_start_recording_completion_handler_t block; + +SC_INIT_AND_NEW_UNAVAILABLE + +- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState + toState:(SCCaptureStateMachineStateId)toState + outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings + audioConfiguration:(SCAudioConfiguration *)configuration + maxDuration:(NSTimeInterval)maxDuration + fileURL:(NSURL *)fileURL + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)block; + +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureRecordingStateTransitionPayload.m b/ManagedCapturer/StateMachine/States/SCCaptureRecordingStateTransitionPayload.m new file mode 100644 index 0000000..167031a --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureRecordingStateTransitionPayload.m @@ -0,0 +1,33 @@ +// +// SCCaptureRecordingStateTransitionPayload.m +// Snapchat +// +// Created by Jingtian Yang on 12/01/2018. 
+// + +#import "SCCaptureRecordingStateTransitionPayload.h" + +@implementation SCCaptureRecordingStateTransitionPayload + +- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState + toState:(SCCaptureStateMachineStateId)toState + outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings + audioConfiguration:(SCAudioConfiguration *)configuration + maxDuration:(NSTimeInterval)maxDuration + fileURL:(NSURL *)fileURL + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)block +{ + self = [super initWithFromState:fromState toState:toState]; + if (self) { + _outputSettings = outputSettings; + _configuration = configuration; + _maxDuration = maxDuration; + _fileURL = fileURL; + _captureSessionID = captureSessionID; + _block = block; + } + return self; +} + +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureRunningState.h b/ManagedCapturer/StateMachine/States/SCCaptureRunningState.h new file mode 100644 index 0000000..4912a4a --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureRunningState.h @@ -0,0 +1,22 @@ +// +// SCCaptureRunningState.h +// Snapchat +// +// Created by Jingtian Yang on 08/01/2018. +// + +#import "SCCaptureBaseState.h" + +#import + +@class SCQueuePerformer; + +@interface SCCaptureRunningState : SCCaptureBaseState + +- (instancetype)init NS_UNAVAILABLE; + +- (instancetype)initWithPerformer:(SCQueuePerformer *)performer + bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper + delegate:(id)delegate; + +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureRunningState.m b/ManagedCapturer/StateMachine/States/SCCaptureRunningState.m new file mode 100644 index 0000000..3fd665e --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureRunningState.m @@ -0,0 +1,176 @@ +// +// SCCaptureRunningState.m +// Snapchat +// +// Created by Jingtian Yang on 08/01/2018. 
+// + +#import "SCCaptureRunningState.h" + +#import "SCCaptureImageStateTransitionPayload.h" +#import "SCCaptureRecordingStateTransitionPayload.h" +#import "SCCaptureWorker.h" +#import "SCManagedCapturerLogging.h" +#import "SCManagedCapturerV1_Private.h" +#import "SCScanConfiguration.h" + +#import +#import +#import + +@interface SCCaptureRunningState () { + __weak id _delegate; + SCQueuePerformer *_performer; +} + +@end + +@implementation SCCaptureRunningState + +- (instancetype)initWithPerformer:(SCQueuePerformer *)performer + bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper + delegate:(id)delegate +{ + self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate]; + if (self) { + _delegate = delegate; + _performer = performer; + } + return self; +} + +- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload + resource:(SCCaptureResource *)resource + context:(NSString *)context +{ + // No op. +} + +- (void)captureStillImageWithResource:(SCCaptureResource *)resource + aspectRatio:(CGFloat)aspectRatio + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCAssertPerformer(_performer); + SCCaptureImageStateTransitionPayload *payload = + [[SCCaptureImageStateTransitionPayload alloc] initWithFromState:SCCaptureRunningStateId + toState:SCCaptureImageStateId + captureSessionId:captureSessionID + aspectRatio:aspectRatio + completionHandler:completionHandler]; + [_delegate currentState:self requestToTransferToNewState:SCCaptureImageStateId payload:payload context:context]; + + NSString *apiName = + [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; + [self.bookKeeper logAPICalled:apiName context:context]; +} + +- (SCCaptureStateMachineStateId)stateId +{ + return SCCaptureRunningStateId; +} + +- (void)startRunningWithCapturerToken:(SCCapturerToken *)token + 
resource:(SCCaptureResource *)resource + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCAssertPerformer(_performer); + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"startRunningAsynchronouslyWithCompletionHandler called. token: %@", token); + [SCCaptureWorker startRunningWithCaptureResource:resource token:token completionHandler:completionHandler]; + + NSString *apiName = + [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; + [self.bookKeeper logAPICalled:apiName context:context]; +} + +- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token + resource:(SCCaptureResource *)resource + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCAssertPerformer(_performer); + + SCLogCapturerInfo(@"Stop running asynchronously. token:%@", token); + if ([[SCManagedCapturerV1 sharedInstance] stopRunningWithCaptureToken:token + completionHandler:completionHandler + context:context]) { + [_delegate currentState:self + requestToTransferToNewState:SCCaptureInitializedStateId + payload:nil + context:context]; + } + + NSString *apiName = + [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; + [self.bookKeeper logAPICalled:apiName context:context]; +} + +- (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration + resource:(SCCaptureResource *)resource + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Start scan on preview asynchronously. 
configuration:%@", configuration); + SCAssertPerformer(_performer); + [SCCaptureWorker startScanWithScanConfiguration:configuration resource:resource]; + [_delegate currentState:self requestToTransferToNewState:SCCaptureScanningStateId payload:nil context:context]; + + NSString *apiName = + [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; + [self.bookKeeper logAPICalled:apiName context:context]; +} + +- (void)prepareForRecordingWithResource:(SCCaptureResource *)resource + audioConfiguration:(SCAudioConfiguration *)configuration + context:(NSString *)context +{ + SCAssertPerformer(_performer); + SCTraceODPCompatibleStart(2); + [SCCaptureWorker prepareForRecordingWithAudioConfiguration:configuration resource:resource]; + + NSString *apiName = + [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; + [self.bookKeeper logAPICalled:apiName context:context]; +} + +- (void)startRecordingWithResource:(SCCaptureResource *)resource + audioConfiguration:(SCAudioConfiguration *)configuration + outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings + maxDuration:(NSTimeInterval)maxDuration + fileURL:(NSURL *)fileURL + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCAssertPerformer(_performer); + + SCCaptureRecordingStateTransitionPayload *payload = + [[SCCaptureRecordingStateTransitionPayload alloc] initWithFromState:SCCaptureRunningStateId + toState:SCCaptureRecordingStateId + outputSettings:outputSettings + audioConfiguration:configuration + maxDuration:maxDuration + fileURL:fileURL + captureSessionID:captureSessionID + completionHandler:completionHandler]; + [_delegate currentState:self requestToTransferToNewState:SCCaptureRecordingStateId payload:payload context:context]; + + NSString *apiName = + 
[NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; + [self.bookKeeper logAPICalled:apiName context:context]; +} + +- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context +{ + // Intentionally No Op, this will be removed once CCAM-13851 gets resolved. + NSString *apiName = + [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; + [self.bookKeeper logAPICalled:apiName context:context]; +} + +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureScanningState.h b/ManagedCapturer/StateMachine/States/SCCaptureScanningState.h new file mode 100644 index 0000000..0e60f79 --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureScanningState.h @@ -0,0 +1,18 @@ +// +// SCCaptureScanningState.h +// Snapchat +// +// Created by Xiaokang Liu on 09/01/2018. +// + +#import "SCCaptureBaseState.h" + +@class SCQueuePerformer; + +@interface SCCaptureScanningState : SCCaptureBaseState +- (instancetype)init NS_UNAVAILABLE; + +- (instancetype)initWithPerformer:(SCQueuePerformer *)performer + bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper + delegate:(id)delegate; +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureScanningState.m b/ManagedCapturer/StateMachine/States/SCCaptureScanningState.m new file mode 100644 index 0000000..7b6f0e7 --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureScanningState.m @@ -0,0 +1,75 @@ +// +// SCCaptureScanningState.m +// Snapchat +// +// Created by Xiaokang Liu on 09/01/2018. 
+// + +#import "SCCaptureScanningState.h" + +#import "SCManagedCapturerLogging.h" +#import "SCManagedCapturerV1_Private.h" + +#import +#import +#import + +@interface SCCaptureScanningState () { + __weak id _delegate; + SCQueuePerformer *_performer; +} + +@end + +@implementation SCCaptureScanningState +- (instancetype)initWithPerformer:(SCQueuePerformer *)performer + bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper + delegate:(id)delegate +{ + self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate]; + if (self) { + SCAssert(delegate, @""); + SCAssert(performer, @""); + SCAssert(bookKeeper, @""); + _delegate = delegate; + _performer = performer; + } + return self; +} + +- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload + resource:(SCCaptureResource *)resource + context:(NSString *)context +{ + // No op. +} + +- (SCCaptureStateMachineStateId)stateId +{ + return SCCaptureScanningStateId; +} + +- (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler + resource:(SCCaptureResource *)resource + context:(NSString *)context +{ + SCAssertPerformer(_performer); + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"stop scan asynchronously."); + [SCCaptureWorker stopScanWithCompletionHandler:completionHandler resource:resource]; + [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context]; + + NSString *apiName = + [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; + [self.bookKeeper logAPICalled:apiName context:context]; +} + +- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context +{ + // Intentionally No Op, this will be removed once CCAM-13851 gets resolved. 
+ NSString *apiName = + [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; + [self.bookKeeper logAPICalled:apiName context:context]; +} + +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureUninitializedState.h b/ManagedCapturer/StateMachine/States/SCCaptureUninitializedState.h new file mode 100644 index 0000000..0809581 --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureUninitializedState.h @@ -0,0 +1,26 @@ +// +// SCCaptureUninitializedState.h +// Snapchat +// +// Created by Lin Jia on 10/19/17. +// +// + +#import "SCCaptureBaseState.h" + +#import + +/* + State which handles capture initialization, which should be used only once for every app life span. +*/ +@class SCQueuePerformer; + +@interface SCCaptureUninitializedState : SCCaptureBaseState + +- (instancetype)init NS_UNAVAILABLE; + +- (instancetype)initWithPerformer:(SCQueuePerformer *)performer + bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper + delegate:(id)delegate; + +@end diff --git a/ManagedCapturer/StateMachine/States/SCCaptureUninitializedState.m b/ManagedCapturer/StateMachine/States/SCCaptureUninitializedState.m new file mode 100644 index 0000000..ffe99bf --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCCaptureUninitializedState.m @@ -0,0 +1,70 @@ +// +// SCCaptureUninitializedState.m +// Snapchat +// +// Created by Lin Jia on 10/19/17. 
+// +// + +#import "SCCaptureUninitializedState.h" + +#import "SCManagedCapturerLogging.h" +#import "SCManagedCapturerV1_Private.h" + +#import +#import +#import + +@interface SCCaptureUninitializedState () { + __weak id _delegate; + SCQueuePerformer *_performer; +} + +@end + +@implementation SCCaptureUninitializedState + +- (instancetype)initWithPerformer:(SCQueuePerformer *)performer + bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper + delegate:(id)delegate +{ + self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate]; + if (self) { + _delegate = delegate; + _performer = performer; + } + return self; +} + +- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload + resource:(SCCaptureResource *)resource + context:(NSString *)context +{ + // No op. +} + +- (SCCaptureStateMachineStateId)stateId +{ + return SCCaptureUninitializedStateId; +} + +- (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition + resource:(SCCaptureResource *)resource + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCAssertPerformer(_performer); + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Setting up with devicePosition:%lu", (unsigned long)devicePosition); + + // TODO: we need to push completionHandler to a payload and let initializedState handle. 
+ [[SCManagedCapturerV1 sharedInstance] setupWithDevicePosition:devicePosition completionHandler:completionHandler]; + + [_delegate currentState:self requestToTransferToNewState:SCCaptureInitializedStateId payload:nil context:context]; + + NSString *apiName = + [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; + [self.bookKeeper logAPICalled:apiName context:context]; +} + +@end diff --git a/ManagedCapturer/StateMachine/States/SCStateTransitionPayload.h b/ManagedCapturer/StateMachine/States/SCStateTransitionPayload.h new file mode 100644 index 0000000..8fca174 --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCStateTransitionPayload.h @@ -0,0 +1,22 @@ +// +// SCStateTransitionPayload.h +// Snapchat +// +// Created by Lin Jia on 1/8/18. +// + +#import "SCCaptureStateUtil.h" + +#import + +@interface SCStateTransitionPayload : NSObject + +@property (nonatomic, readonly, assign) SCCaptureStateMachineStateId fromState; + +@property (nonatomic, readonly, assign) SCCaptureStateMachineStateId toState; + +SC_INIT_AND_NEW_UNAVAILABLE + +- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState toState:(SCCaptureStateMachineStateId)toState; + +@end diff --git a/ManagedCapturer/StateMachine/States/SCStateTransitionPayload.m b/ManagedCapturer/StateMachine/States/SCStateTransitionPayload.m new file mode 100644 index 0000000..d4df2bd --- /dev/null +++ b/ManagedCapturer/StateMachine/States/SCStateTransitionPayload.m @@ -0,0 +1,27 @@ +// +// SCStateTransitionPayload.m +// Snapchat +// +// Created by Lin Jia on 1/8/18. 
+// + +#import "SCStateTransitionPayload.h" + +#import + +@implementation SCStateTransitionPayload + +- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState toState:(SCCaptureStateMachineStateId)toState +{ + self = [super init]; + if (self) { + SCAssert(fromState != toState, @""); + SCAssert(fromState > SCCaptureBaseStateId && fromState < SCCaptureStateMachineStateIdCount, @""); + SCAssert(toState > SCCaptureBaseStateId && toState < SCCaptureStateMachineStateIdCount, @""); + _fromState = fromState; + _toState = toState; + } + return self; +} + +@end