diff --git a/Resource/SCCaptureResource.h b/Resource/SCCaptureResource.h new file mode 100644 index 0000000..00b28e6 --- /dev/null +++ b/Resource/SCCaptureResource.h @@ -0,0 +1,233 @@ +// +// SCCaptureResource.h +// Snapchat +// +// Created by Lin Jia on 10/19/17. +// +// + +#import "SCManagedCapturerLensAPI.h" +#import "SCManagedCapturerListenerAnnouncer.h" +#import "SCSnapCreationTriggers.h" + +#import + +#import + +#import + +/* + In general, the function of SCCapturer is to use some resources (such as SCManagedCapturerListenerAnnouncer), to do + something (such as announce an event). + + SCCaptureResource abstracts away the "resources" part of SCCapturer. It has no APIs itself; it is simply the + resource that gets passed around for the capturer V2 state machine. + */ +@class SCManagedDeviceCapacityAnalyzer; + +@class SCManagedCapturePreviewLayerController; + +@class ARSession; + +@class SCManagedVideoScanner; + +@class LSAGLView; + +@protocol SCManagedCapturerLSAComponentTrackerAPI; + +@class SCManagedStillImageCapturer; + +@class SCManagedVideoCapturer; + +@class SCQueuePerformer; + +@class SCManagedVideoFrameSampler; + +@class SCManagedDroppedFramesReporter; + +@class SCManagedVideoStreamReporter; + +@protocol SCManagedCapturerGLViewManagerAPI; + +@class SCCapturerToken; + +@class SCSingleFrameStreamCapturer; + +@class SCManagedFrontFlashController; + +@class SCManagedVideoCapturerHandler; + +@class SCManagedStillImageCapturerHandler; + +@class SCManagedDeviceCapacityAnalyzerHandler; + +@class SCManagedCaptureDeviceDefaultZoomHandler; + +@class SCManagedCaptureDeviceHandler; + +@class SCBlackCameraNoOutputDetector; + +@class SCCaptureSessionFixer; + +@protocol SCCaptureFaceDetector; + +@protocol SCManagedCapturerLensAPI; + +@protocol SCManagedCapturerARImageCaptureProvider; + +@class SCManagedCapturerARSessionHandler; + +@class SCManagedCaptureDeviceSubjectAreaHandler; + +@class SCManagedCaptureSession; + +@class SCBlackCameraDetector; + +@protocol 
SCLensProcessingCore; + +@protocol SCManagedCapturerLensAPI; + +@protocol SCManagedCapturePreviewLayerControllerDelegate; + +typedef enum : NSUInteger { + SCManagedCapturerStatusUnknown = 0, + SCManagedCapturerStatusReady, + SCManagedCapturerStatusRunning, +} SCManagedCapturerStatus; + +@protocol SCDeviceMotionProvider + +@property (nonatomic, readonly) BOOL isUnderDeviceMotion; + +@end + +@protocol SCFileInputDecider + +@property (nonatomic, readonly) BOOL shouldProcessFileInput; +@property (nonatomic, readonly) NSURL *fileURL; + +@end + +@interface SCCaptureResource : NSObject + +@property (nonatomic, readwrite, assign) SCManagedCapturerStatus status; + +@property (nonatomic, readwrite, strong) SCManagedCapturerState *state; + +@property (nonatomic, readwrite, strong) SCManagedCaptureDevice *device; + +@property (nonatomic, readwrite, strong) id lensProcessingCore; + +@property (nonatomic, readwrite, strong) id lensAPIProvider; + +@property (nonatomic, readwrite, strong) ARSession *arSession NS_AVAILABLE_IOS(11_0); + +@property (nonatomic, readwrite, strong) SCManagedStillImageCapturer *arImageCapturer NS_AVAILABLE_IOS(11_0); + +@property (nonatomic, readwrite, strong) SCManagedCaptureSession *managedSession; + +@property (nonatomic, readwrite, strong) id videoDataSource; + +@property (nonatomic, readwrite, strong) SCManagedDeviceCapacityAnalyzer *deviceCapacityAnalyzer; + +@property (nonatomic, readwrite, strong) SCManagedVideoScanner *videoScanner; + +@property (nonatomic, readwrite, strong) SCManagedCapturerListenerAnnouncer *announcer; + +@property (nonatomic, readwrite, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer; + +@property (nonatomic, readwrite, strong) id videoPreviewGLViewManager; + +@property (nonatomic, readwrite, strong) SCManagedStillImageCapturer *stillImageCapturer; + +@property (nonatomic, readwrite, strong) SCManagedVideoCapturer *videoCapturer; + +@property (nonatomic, readwrite, strong) SCQueuePerformer *queuePerformer; + +@property 
(nonatomic, readwrite, strong) SCManagedVideoFrameSampler *videoFrameSampler; + +@property (nonatomic, readwrite, strong) SCManagedDroppedFramesReporter *droppedFramesReporter; + +@property (nonatomic, readwrite, strong) SCManagedVideoStreamReporter *videoStreamReporter; // INTERNAL USE ONLY + +@property (nonatomic, readwrite, strong) SCManagedFrontFlashController *frontFlashController; + +@property (nonatomic, readwrite, strong) SCManagedVideoCapturerHandler *videoCapturerHandler; + +@property (nonatomic, readwrite, strong) SCManagedStillImageCapturerHandler *stillImageCapturerHandler; + +@property (nonatomic, readwrite, strong) SCManagedDeviceCapacityAnalyzerHandler *deviceCapacityAnalyzerHandler; + +@property (nonatomic, readwrite, strong) SCManagedCaptureDeviceDefaultZoomHandler *deviceZoomHandler; + +@property (nonatomic, readwrite, strong) SCManagedCaptureDeviceHandler *captureDeviceHandler; + +@property (nonatomic, readwrite, strong) id captureFaceDetector; + +@property (nonatomic, readwrite, strong) FBKVOController *kvoController; + +@property (nonatomic, readwrite, strong) id lsaTrackingComponentHandler; + +@property (nonatomic, readwrite, strong) SCManagedCapturerARSessionHandler *arSessionHandler; + +@property (nonatomic, assign) SEL completeARSessionShutdown; + +@property (nonatomic, assign) SEL handleAVSessionStatusChange; + +@property (nonatomic, assign) BOOL videoRecording; + +@property (nonatomic, assign) NSInteger numRetriesFixAVCaptureSessionWithCurrentSession; + +@property (nonatomic, assign) BOOL appInBackground; + +@property (nonatomic, assign) NSUInteger streamingSequence; + +@property (nonatomic, assign) BOOL stillImageCapturing; + +@property (nonatomic, readwrite, strong) NSTimer *livenessTimer; + +@property (nonatomic, readwrite, strong) NSMutableSet *tokenSet; + +@property (nonatomic, readwrite, strong) SCSingleFrameStreamCapturer *frameCap; + +@property (nonatomic, readwrite, strong) id sampleBufferDisplayController; + +@property 
(nonatomic, readwrite, strong) SCSnapCreationTriggers *snapCreationTriggers; + +// Different from most properties above, the following are main-thread properties. +@property (nonatomic, assign) BOOL allowsZoom; + +@property (nonatomic, assign) NSUInteger numRetriesFixInconsistencyWithCurrentSession; + +@property (nonatomic, readwrite, strong) NSMutableDictionary *debugInfoDict; + +@property (nonatomic, assign) BOOL notificationRegistered; + +@property (nonatomic, readwrite, strong) SCManagedCaptureDeviceSubjectAreaHandler *deviceSubjectAreaHandler; + +@property (nonatomic, assign) SEL sessionRuntimeError; + +@property (nonatomic, assign) SEL livenessConsistency; + +// TODO: these properties will be refactored into SCCaptureSessionFixer class +// The refactor will be in a separate PR +// Timestamp when _fixAVSessionIfNecessary is called +@property (nonatomic, assign) NSTimeInterval lastFixSessionTimestamp; +// Timestamp when session runtime error is handled +@property (nonatomic, assign) NSTimeInterval lastSessionRuntimeErrorTime; +// Whether a fix that recreates the session has already been scheduled +@property (nonatomic, assign) BOOL isRecreateSessionFixScheduled; + +@property (nonatomic, readwrite, strong) SCCaptureSessionFixer *captureSessionFixer; + +@property (nonatomic, readwrite, strong) SCBlackCameraDetector *blackCameraDetector; + +@property (nonatomic, readwrite, strong) id deviceMotionProvider; + +@property (nonatomic, readwrite, strong) id arImageCaptureProvider; + +@property (nonatomic, readwrite, strong) id fileInputDecider; + +@property (nonatomic, readwrite, strong) + id previewLayerControllerDelegate; +@end diff --git a/Resource/SCCaptureResource.m b/Resource/SCCaptureResource.m new file mode 100644 index 0000000..562cd60 --- /dev/null +++ b/Resource/SCCaptureResource.m @@ -0,0 +1,66 @@ +// +// SCCaptureResource.m +// Snapchat +// +// Created by Lin Jia on 10/19/17.
+// +// + +#import "SCCaptureResource.h" + +#import "SCBlackCameraDetector.h" +#import "SCManagedCapturerState.h" +#import "SCManagedFrontFlashController.h" +#import "SCManagedVideoCapturer.h" + +#import +#import +#import + +#import + +@interface SCCaptureResource () { + FBKVOController *_previewHiddenKVO; +} + +@end + +@implementation SCCaptureResource + +- (SCManagedFrontFlashController *)frontFlashController +{ + SCTraceODPCompatibleStart(2); + SCAssert([self.queuePerformer isCurrentPerformer], @""); + if (!_frontFlashController) { + _frontFlashController = [[SCManagedFrontFlashController alloc] init]; + } + return _frontFlashController; +} + +- (void)setVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)layer +{ + SC_GUARD_ELSE_RETURN(layer != _videoPreviewLayer); + + if (_videoPreviewLayer) { + [_previewHiddenKVO unobserve:_videoPreviewLayer]; + } + _videoPreviewLayer = layer; + + SC_GUARD_ELSE_RETURN(_videoPreviewLayer); + + if (!_previewHiddenKVO) { + _previewHiddenKVO = [[FBKVOController alloc] initWithObserver:self]; + } + + [_previewHiddenKVO observe:_videoPreviewLayer + keyPath:@keypath(_videoPreviewLayer.hidden) + options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld + block:^(id observer, id object, NSDictionary *change) { + BOOL oldValue = [change[NSKeyValueChangeOldKey] boolValue]; + BOOL newValue = [change[NSKeyValueChangeNewKey] boolValue]; + if (oldValue != newValue) { + [((SCCaptureResource *)observer).blackCameraDetector capturePreviewDidBecomeVisible:!newValue]; // observer is self, held weakly by FBKVOController; referencing the _blackCameraDetector ivar here would capture self and create a retain cycle (self -> _previewHiddenKVO -> block -> self) + } + }]; +} +@end diff --git a/Tweaks/SCCameraTweaks.h b/Tweaks/SCCameraTweaks.h new file mode 100644 index 0000000..9a4ca49 --- /dev/null +++ b/Tweaks/SCCameraTweaks.h @@ -0,0 +1,377 @@ +// +// SCCameraTweaks.h +// Snapchat +// +// Created by Liu Liu on 9/16/15. +// Copyright © 2015 Snapchat, Inc. All rights reserved.
+// + +#import +#import +#import + +#import + +#import +#import + +// Core Camera + +typedef NS_ENUM(NSUInteger, SCManagedCaptureDeviceZoomHandlerType) { + SCManagedCaptureDeviceDefaultZoom, + SCManagedCaptureDeviceSavitzkyGolayFilter, + SCManagedCaptureDeviceLinearInterpolation, +}; + +typedef NS_ENUM(NSUInteger, SCCameraTweaksStrategyType) { + SCCameraTweaksStrategyFollowABTest = 0, + SCCameraTweaksStrategyOverrideToYes, + SCCameraTweaksStrategyOverrideToNo +}; + +typedef NS_ENUM(NSUInteger, SCCameraHandsFreeModeType) { + SCCameraHandsFreeModeTypeABTest = 0, + SCCameraHandsFreeModeTypeDisabled, + SCCameraHandsFreeModeTypeMainOnly, + SCCameraHandsFreeModeTypeChatMoveCaptureButton, + SCCameraHandsFreeModeTypeMainAndChat, + SCCameraHandsFreeModeTypeLeftOfCapture, +}; + +/// Face detection and focus strategy in Tweak. There are fewer options in internal Tweaks than the A/B testing +/// strategies. +typedef NS_ENUM(NSUInteger, SCCameraFaceFocusModeStrategyType) { + SCCameraFaceFocusModeStrategyTypeABTest = 0, + SCCameraFaceFocusModeStrategyTypeDisabled, // Disabled for both cameras. + SCCameraFaceFocusModeStrategyTypeOffByDefault, // Enabled for both cameras, but is off by default. + SCCameraFaceFocusModeStrategyTypeOnByDefault, // Enabled for both cameras, and is on by default.
+}; + +typedef NS_ENUM(NSUInteger, SCCameraFaceFocusDetectionMethodType) { + SCCameraFaceFocusDetectionMethodTypeABTest = 0, + SCCameraFaceFocusDetectionMethodTypeCIDetector, // Use SCCaptureCoreImageFaceDetector + SCCameraFaceFocusDetectionMethodTypeAVMetadata, // Use SCCaptureMetadataOutputDetector +}; + +SC_EXTERN_C_BEGIN + +extern SCManagedCaptureDeviceZoomHandlerType SCCameraTweaksDeviceZoomHandlerStrategy(void); + +extern BOOL SCCameraTweaksBlackCameraRecoveryEnabled(void); + +extern BOOL SCCameraTweaksMicPermissionEnabled(void); + +extern BOOL SCCameraTweaksEnableCaptureKeepRecordedVideo(void); + +extern BOOL SCCameraTweaksEnableHandsFreeXToCancel(void); +extern SCCameraHandsFreeModeType SCCameraTweaksHandsFreeMode(void); + +BOOL SCCameraTweaksEnableShortPreviewTransitionAnimationDuration(void); + +extern BOOL SCCameraTweaksEnablePreviewPresenterFastPreview(void); + +extern BOOL SCCameraTweaksEnableCaptureSharePerformer(void); + +extern BOOL SCCameraTweaksEnableFaceDetectionFocus(SCManagedCaptureDevicePosition captureDevicePosition); + +extern BOOL SCCameraTweaksTurnOnFaceDetectionFocusByDefault(SCManagedCaptureDevicePosition captureDevicePosition); + +extern SCCameraFaceFocusDetectionMethodType SCCameraFaceFocusDetectionMethod(void); + +extern CGFloat SCCameraFaceFocusMinFaceSize(void); + +extern BOOL SCCameraTweaksSessionLightWeightFixEnabled(void); + +SC_EXTERN_C_END + +static inline BOOL SCCameraTweaksEnableVideoStabilization(void) +{ + return FBTweakValue(@"Camera", @"Core Camera", @"Enable video stabilization", NO); +} + +static inline BOOL SCCameraTweaksEnableForceTouchToToggleCamera(void) +{ + return FBTweakValue(@"Camera", @"Recording", @"Force Touch to Toggle", NO); +} + +static inline BOOL SCCameraTweaksEnableStayOnCameraAfterPostingStory(void) +{ + return FBTweakValue(@"Camera", @"Story", @"Stay on camera after posting", NO); +} + +static inline BOOL SCCameraTweaksEnableKeepLastFrameOnCamera(void) +{ + return FBTweakValue(@"Camera", @"Core 
Camera", @"Keep last frame on camera", YES); +} + +static inline BOOL SCCameraTweaksSmoothAutoFocusWhileRecording(void) +{ + return FBTweakValue(@"Camera", @"Core Camera", @"Smooth autofocus while recording", YES); +} + +static inline NSInteger SCCameraExposureAdjustmentMode(void) +{ + return [FBTweakValue( + @"Camera", @"Core Camera", @"Adjust Exposure", (id) @0, + (@{ @0 : @"NO", + @1 : @"Dynamic enhancement", + @2 : @"Night vision", + @3 : @"Inverted night vision" })) integerValue]; +} + +static inline BOOL SCCameraTweaksRotateToggleCameraButton(void) +{ + return SCTweakValueWithHalt(@"Camera", @"Core Camera", @"Rotate Toggle-Camera Button", NO); +} + +static inline CGFloat SCCameraTweaksRotateToggleCameraButtonTime(void) +{ + return FBTweakValue(@"Camera", @"Core Camera", @"Toggle-Camera Button Rotation Time", 0.3); +} + +static inline BOOL SCCameraTweaksDefaultPortrait(void) +{ + return FBTweakValue(@"Camera", @"Core Camera", @"Default to Portrait Orientation", YES); +} + +// For test purpose +static inline BOOL SCCameraTweaksTranscodingAlwaysFails(void) +{ + return FBTweakValue(@"Camera", @"Core Camera", @"Transcoding always fails", NO); +} + +// This tweak disables the video masking behavior of the snap overlays; +// Intended to be used by curators who are on-site snapping special events. 
+// Ping news-dev@snapchat.com for any questions/comments +static inline BOOL SCCameraTweaksDisableOverlayVideoMask(void) +{ + return FBTweakValue(@"Camera", @"Creative Tools", @"Disable Overlay Video Masking", NO); +} + +static inline NSInteger SCCameraTweaksDelayTurnOnFilters(void) +{ + return [FBTweakValue(@"Camera", @"Core Camera", @"Delay turn on filter", (id) @0, + (@{ @0 : @"Respect A/B testing", + @1 : @"Override to YES", + @2 : @"Override to NO" })) integerValue]; +} + +static inline BOOL SCCameraTweaksEnableExposurePointObservation(void) +{ + return FBTweakValue(@"Camera", @"Core Camera - Face Focus", @"Observe Exposure Point", NO); +} + +static inline BOOL SCCameraTweaksEnableFocusPointObservation(void) +{ + return FBTweakValue(@"Camera", @"Core Camera - Face Focus", @"Observe Focus Point", NO); +} + +static inline CGFloat SCCameraTweaksSmoothZoomThresholdTime() +{ + return FBTweakValue(@"Camera", @"Zoom Strategy - Linear Interpolation", @"Threshold time", 0.3); +} + +static inline CGFloat SCCameraTweaksSmoothZoomThresholdFactor() +{ + return FBTweakValue(@"Camera", @"Zoom Strategy - Linear Interpolation", @"Threshold factor diff", 0.25); +} + +static inline CGFloat SCCameraTweaksSmoothZoomIntermediateFramesPerSecond() +{ + return FBTweakValue(@"Camera", @"Zoom Strategy - Linear Interpolation", @"Intermediate fps", 60); +} + +static inline CGFloat SCCameraTweaksSmoothZoomDelayTolerantTime() +{ + return FBTweakValue(@"Camera", @"Zoom Strategy - Linear Interpolation", @"Delay tolerant time", 0.15); +} + +static inline CGFloat SCCameraTweaksSmoothZoomMinStepLength() +{ + return FBTweakValue(@"Camera", @"Zoom Strategy - Linear Interpolation", @"Min step length", 0.05); +} + +static inline CGFloat SCCameraTweaksExposureDeadline() +{ + return FBTweakValue(@"Camera", @"Adjust Exposure", @"Exposure Deadline", 0.2); +} + +static inline BOOL SCCameraTweaksKillFrontCamera(void) +{ + return SCTweakValueWithHalt(@"Camera", @"Debugging", @"Kill Front Camera", NO); +} 
+ +static inline BOOL SCCameraTweaksKillBackCamera(void) +{ + return SCTweakValueWithHalt(@"Camera", @"Debugging", @"Kill Back Camera", NO); +} + +#if TARGET_IPHONE_SIMULATOR + +static inline BOOL SCCameraTweaksUseRealMockImage(void) +{ + return FBTweakValue(@"Camera", @"Debugging", @"Use real mock image on simulator", YES); +} + +#endif + +static inline CGFloat SCCameraTweaksShortPreviewTransitionAnimationDuration() +{ + return FBTweakValue(@"Camera", @"Preview Transition", @"Short Animation Duration", 0.35); +} + +static inline SCCameraTweaksStrategyType SCCameraTweaksPreviewTransitionAnimationDurationStrategy() +{ + NSNumber *strategy = SCTweakValueWithHalt(@"Camera", @"Preview Transition", @"Enable Short Animation Duration", + (id) @(SCCameraTweaksStrategyFollowABTest), (@{ + @(SCCameraTweaksStrategyFollowABTest) : @"Respect A/B testing", + @(SCCameraTweaksStrategyOverrideToYes) : @"Override to YES", + @(SCCameraTweaksStrategyOverrideToNo) : @"Override to NO" + })); + return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue]; +} + +static inline CGFloat SCCameraTweaksEnablePortraitModeButton(void) +{ + return FBTweakValue(@"Camera", @"Core Camera - Portrait Mode", @"Enable Button", NO); +} + +static inline CGFloat SCCameraTweaksDepthBlurForegroundThreshold(void) +{ + return FBTweakValue(@"Camera", @"Core Camera - Portrait Mode", @"Foreground Blur Threshold", 0.3); +} + +static inline CGFloat SCCameraTweaksDepthBlurBackgroundThreshold(void) +{ + return FBTweakValue(@"Camera", @"Core Camera - Portrait Mode", @"Background Blur Threshold", 0.1); +} + +static inline CGFloat SCCameraTweaksBlurSigma(void) +{ + return FBTweakValue(@"Camera", @"Core Camera - Portrait Mode", @"Blur Sigma", 4.0); +} + +static inline BOOL SCCameraTweaksEnableFilterInputFocusRect(void) +{ + return FBTweakValue(@"Camera", @"Core Camera - Portrait Mode", @"Filter Input Focus Rect", NO); +} + +static inline BOOL SCCameraTweaksEnablePortraitModeTapToFocus(void) +{ + return 
FBTweakValue(@"Camera", @"Core Camera - Portrait Mode", @"Tap to Focus", NO); +} + +static inline BOOL SCCameraTweaksEnablePortraitModeAutofocus(void) +{ + return FBTweakValue(@"Camera", @"Core Camera - Portrait Mode", @"Autofocus", NO); +} + +static inline BOOL SCCameraTweaksDepthToGrayscaleOverride(void) +{ + return FBTweakValue(@"Camera", @"Core Camera - Portrait Mode", @"Depth to Grayscale Override", NO); +} + +static inline SCCameraTweaksStrategyType SCCameraTweaksEnableHandsFreeXToCancelStrategy(void) +{ + NSNumber *strategy = SCTweakValueWithHalt(@"Camera", @"Hands-Free Recording", @"X to Cancel", + (id) @(SCCameraTweaksStrategyFollowABTest), (@{ + @(SCCameraTweaksStrategyFollowABTest) : @"Respect A/B testing", + @(SCCameraTweaksStrategyOverrideToYes) : @"Override to YES", + @(SCCameraTweaksStrategyOverrideToNo) : @"Override to NO" + })); + return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue]; +} + +static inline SCCameraHandsFreeModeType SCCameraTweaksHandsFreeModeType() +{ + NSNumber *strategy = SCTweakValueWithHalt( + @"Camera", @"Hands-Free Recording", @"Enabled", (id) @(SCCameraHandsFreeModeTypeABTest), (@{ + @(SCCameraHandsFreeModeTypeABTest) : @"Respect A/B testing", + @(SCCameraHandsFreeModeTypeDisabled) : @"Disable", + @(SCCameraHandsFreeModeTypeMainOnly) : @"Main Camera only", + @(SCCameraHandsFreeModeTypeChatMoveCaptureButton) : @"Main Camera + move Chat capture button", + @(SCCameraHandsFreeModeTypeMainAndChat) : @"Main + Chat Cameras", + @(SCCameraHandsFreeModeTypeLeftOfCapture) : @"Left of Main + Chat Cameras" + })); + return (SCCameraHandsFreeModeType)[strategy unsignedIntegerValue]; +} + +static inline SCCameraTweaksStrategyType SCCameraTweaksPreviewPresenterFastPreviewStrategy(void) +{ + NSNumber *strategy = SCTweakValueWithHalt(@"Camera", @"Preview Presenter", @"Fast Preview", + (id) @(SCCameraTweaksStrategyFollowABTest), (@{ + @(SCCameraTweaksStrategyFollowABTest) : @"Respect A/B testing", + 
@(SCCameraTweaksStrategyOverrideToYes) : @"Override to YES", + @(SCCameraTweaksStrategyOverrideToNo) : @"Override to NO" + })); + return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue]; +} + +static inline NSInteger SCCameraTweaksEnableCaptureKeepRecordedVideoStrategy(void) +{ + NSNumber *strategy = + SCTweakValueWithHalt(@"Camera", @"Core Camera - Capture Keep Recorded Video", + @"Enable Capture Keep Recorded Video", (id) @(SCCameraTweaksStrategyFollowABTest), (@{ + @(SCCameraTweaksStrategyFollowABTest) : @"Respect A/B testing", + @(SCCameraTweaksStrategyOverrideToYes) : @"Override to YES", + @(SCCameraTweaksStrategyOverrideToNo) : @"Override to NO" + })); + return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue]; +} + +static inline NSInteger SCCameraTweaksEnableCaptureSharePerformerStrategy(void) +{ + NSNumber *strategy = + SCTweakValueWithHalt(@"Camera", @"Core Camera - Capture Share Performer", @"Enable Capture Share Performer", + (id) @(SCCameraTweaksStrategyFollowABTest), (@{ + @(SCCameraTweaksStrategyFollowABTest) : @"Respect A/B testing", + @(SCCameraTweaksStrategyOverrideToYes) : @"Override to YES", + @(SCCameraTweaksStrategyOverrideToNo) : @"Override to NO" + })); + return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue]; +} + +static inline SCCameraFaceFocusModeStrategyType SCCameraTweaksFaceFocusStrategy() +{ + NSNumber *strategy = + SCTweakValueWithHalt(@"Camera", @"Core Camera - Face Focus", @"Enable Face Focus", + (id) @(SCCameraFaceFocusModeStrategyTypeABTest), (@{ + @(SCCameraFaceFocusModeStrategyTypeABTest) : @"Respect A/B testing", + @(SCCameraFaceFocusModeStrategyTypeDisabled) : @"Disabled", + @(SCCameraFaceFocusModeStrategyTypeOffByDefault) : @"Enabled, off by default", + @(SCCameraFaceFocusModeStrategyTypeOnByDefault) : @"Enabled, on by default", + })); + return (SCCameraFaceFocusModeStrategyType)[strategy unsignedIntegerValue]; +} + +static inline SCCameraFaceFocusDetectionMethodType 
SCCameraTweaksFaceFocusDetectionMethodType() +{ + NSNumber *strategy = + SCTweakValueWithHalt(@"Camera", @"Core Camera - Face Focus", @"Detection Method", + (id) @(SCCameraFaceFocusDetectionMethodTypeABTest), (@{ + @(SCCameraFaceFocusDetectionMethodTypeABTest) : @"Respect A/B testing", + @(SCCameraFaceFocusDetectionMethodTypeCIDetector) : @"CIDetector", + @(SCCameraFaceFocusDetectionMethodTypeAVMetadata) : @"AVMetadata", + })); + return (SCCameraFaceFocusDetectionMethodType)[strategy unsignedIntegerValue]; +} + +static inline int SCCameraTweaksFaceFocusDetectionFrequency() +{ + return FBTweakValue(@"Camera", @"Core Camera - Face Focus", @"Detection Frequency", 3, 1, 30); +} + +static inline BOOL SCCameraTweaksFaceFocusMinFaceSizeRespectABTesting() +{ + return SCTweakValueWithHalt(@"Camera", @"Core Camera - Face Focus", @"Min Face Size Respect AB", YES); +} + +static inline CGFloat SCCameraTweaksFaceFocusMinFaceSizeValue() +{ + return FBTweakValue(@"Camera", @"Core Camera - Face Focus", @"Min Face Size", 0.25, 0.01, 0.5); +} + +static inline BOOL SCCameraTweaksEnableDualCamera(void) +{ + return SCTweakValueWithHalt(@"Camera", @"Core Camera - Dual Camera", @"Enable Dual Camera", NO); +} diff --git a/Tweaks/SCCameraTweaks.m b/Tweaks/SCCameraTweaks.m new file mode 100644 index 0000000..cb92928 --- /dev/null +++ b/Tweaks/SCCameraTweaks.m @@ -0,0 +1,396 @@ +// +// SCCameraTweaks.m +// Snapchat +// +// Created by Liu Liu on 10/4/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. +// + +#import "SCCameraTweaks.h" + +#import "SCManagedCapturePreviewLayerController.h" + +#import +#import +#import + +SCManagedCaptureDeviceZoomHandlerType SCCameraTweaksDeviceZoomHandlerStrategy(void) +{ + + NSNumber *strategyNumber = SCTweakValueWithHalt( + @"Camera", @"Core Camera", @"Zoom Strategy", + @(SCIsMasterBuild() ? 
SCManagedCaptureDeviceLinearInterpolation : SCManagedCaptureDeviceDefaultZoom), (@{ + @(SCManagedCaptureDeviceDefaultZoom) : @"Default", + @(SCManagedCaptureDeviceSavitzkyGolayFilter) : @"Savitzky-Golay Filter", + @(SCManagedCaptureDeviceLinearInterpolation) : @"Linear Interpolation" + })); + return (SCManagedCaptureDeviceZoomHandlerType)[strategyNumber integerValue]; +} + +BOOL SCCameraTweaksEnableFaceDetectionFocus(SCManagedCaptureDevicePosition captureDevicePosition) +{ + SC_GUARD_ELSE_RETURN_VALUE([SCDeviceName isIphone], NO); + SC_GUARD_ELSE_RETURN_VALUE(captureDevicePosition != SCManagedCaptureDevicePositionBackDualCamera, NO); + + BOOL isFrontCamera = (captureDevicePosition == SCManagedCaptureDevicePositionFront); + BOOL isEnabled = NO; + SCCameraFaceFocusModeStrategyType option = SCCameraTweaksFaceFocusStrategy(); + switch (option) { + case SCCameraFaceFocusModeStrategyTypeABTest: + if (isFrontCamera) { + isEnabled = SCExperimentWithFaceDetectionFocusFrontCameraEnabled(); + } else { + isEnabled = SCExperimentWithFaceDetectionFocusBackCameraEnabled(); + } + break; + case SCCameraFaceFocusModeStrategyTypeDisabled: + isEnabled = NO; + break; + case SCCameraFaceFocusModeStrategyTypeOffByDefault: + case SCCameraFaceFocusModeStrategyTypeOnByDefault: + isEnabled = YES; + break; + } + return isEnabled; +} + +BOOL SCCameraTweaksTurnOnFaceDetectionFocusByDefault(SCManagedCaptureDevicePosition captureDevicePosition) +{ + SC_GUARD_ELSE_RETURN_VALUE([SCDeviceName isIphone], NO); + SC_GUARD_ELSE_RETURN_VALUE(captureDevicePosition != SCManagedCaptureDevicePositionBackDualCamera, NO); + + BOOL isFrontCamera = (captureDevicePosition == SCManagedCaptureDevicePositionFront); + BOOL isOnByDefault = NO; + SCCameraFaceFocusModeStrategyType option = SCCameraTweaksFaceFocusStrategy(); + switch (option) { + case SCCameraFaceFocusModeStrategyTypeABTest: + if (isFrontCamera) { + isOnByDefault = SCExperimentWithFaceDetectionFocusFrontCameraOnByDefault(); + } else { + isOnByDefault = 
SCExperimentWithFaceDetectionFocusBackCameraOnByDefault(); + } + break; + case SCCameraFaceFocusModeStrategyTypeDisabled: + case SCCameraFaceFocusModeStrategyTypeOffByDefault: + isOnByDefault = NO; + break; + case SCCameraFaceFocusModeStrategyTypeOnByDefault: + isOnByDefault = YES; + break; + } + return isOnByDefault; +} + +SCCameraFaceFocusDetectionMethodType SCCameraFaceFocusDetectionMethod() +{ + static SCCameraFaceFocusDetectionMethodType detectionMethodType; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + SCCameraFaceFocusDetectionMethodType option = SCCameraTweaksFaceFocusDetectionMethodType(); + switch (option) { + case SCCameraFaceFocusDetectionMethodTypeABTest: { + // Check the validity of AB value. + NSUInteger experimentValue = SCExperimentWithFaceDetectionFocusDetectionMethod(); + if (experimentValue >= SCCameraFaceFocusDetectionMethodTypeCIDetector && + experimentValue <= SCCameraFaceFocusDetectionMethodTypeAVMetadata) { + detectionMethodType = experimentValue; + } else { + // Use CIDetector by default. 
+ detectionMethodType = SCCameraFaceFocusDetectionMethodTypeCIDetector; + } + } break; + case SCCameraFaceFocusDetectionMethodTypeAVMetadata: + detectionMethodType = SCCameraFaceFocusDetectionMethodTypeAVMetadata; + break; + case SCCameraFaceFocusDetectionMethodTypeCIDetector: + detectionMethodType = SCCameraFaceFocusDetectionMethodTypeCIDetector; + break; + } + }); + return detectionMethodType; +} + +CGFloat SCCameraFaceFocusMinFaceSize(void) +{ + static CGFloat minFaceSize; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + if (SCCameraTweaksFaceFocusMinFaceSizeRespectABTesting()) { + minFaceSize = (CGFloat)SCExperimentWithFaceDetectionMinFaceSize(); + } else { + minFaceSize = SCCameraTweaksFaceFocusMinFaceSizeValue(); + } + if (minFaceSize < 0.01 || minFaceSize > 0.5) { + minFaceSize = 0.25; // Default value is 0.25 + } + }); + return minFaceSize; +} + +BOOL SCCameraTweaksEnableCaptureKeepRecordedVideo(void) +{ + static BOOL enableCaptureKeepRecordedVideo; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + switch (SCCameraTweaksEnableCaptureKeepRecordedVideoStrategy()) { + case SCCameraTweaksStrategyOverrideToYes: { + enableCaptureKeepRecordedVideo = YES; + break; + } + case SCCameraTweaksStrategyOverrideToNo: { + enableCaptureKeepRecordedVideo = NO; + break; + } + case SCCameraTweaksStrategyFollowABTest: { + enableCaptureKeepRecordedVideo = SCExperimentWithCaptureKeepRecordedVideo(); + break; + } + default: { + enableCaptureKeepRecordedVideo = NO; + break; + } + } + }); + return enableCaptureKeepRecordedVideo; +} + +static inline SCCameraTweaksStrategyType SCCameraTweaksBlackCameraRecoveryStrategy(void) +{ + NSNumber *strategy = SCTweakValueWithHalt(@"Camera", @"Core Camera", @"Black Camera Recovery", + (id) @(SCCameraTweaksStrategyFollowABTest), (@{ + @(SCCameraTweaksStrategyFollowABTest) : @"Respect A/B testing", + @(SCCameraTweaksStrategyOverrideToYes) : @"Override to YES", + @(SCCameraTweaksStrategyOverrideToNo) : 
@"Override to NO" + })); + return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue]; +} + +BOOL SCCameraTweaksBlackCameraRecoveryEnabled(void) +{ + static BOOL enabled; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + switch (SCCameraTweaksBlackCameraRecoveryStrategy()) { + case SCCameraTweaksStrategyOverrideToYes: + enabled = YES; + break; + case SCCameraTweaksStrategyOverrideToNo: + enabled = NO; + break; + case SCCameraTweaksStrategyFollowABTest: + enabled = SCExperimentWithBlackCameraRecovery(); + break; + default: + enabled = NO; + break; + } + }); + return enabled; +} + +static inline SCCameraTweaksStrategyType SCCameraTweaksMicrophoneNotificationStrategy(void) +{ + NSNumber *strategy = SCTweakValueWithHalt(@"Camera", @"Core Camera", @"Mic Notification", + (id) @(SCCameraTweaksStrategyFollowABTest), (@{ + @(SCCameraTweaksStrategyFollowABTest) : @"Respect A/B testing", + @(SCCameraTweaksStrategyOverrideToYes) : @"Override to YES", + @(SCCameraTweaksStrategyOverrideToNo) : @"Override to NO" + })); + return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue]; +} + +BOOL SCCameraTweaksMicPermissionEnabled(void) +{ + static BOOL enabled; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + switch (SCCameraTweaksMicrophoneNotificationStrategy()) { + case SCCameraTweaksStrategyOverrideToYes: + enabled = YES; + break; + case SCCameraTweaksStrategyOverrideToNo: + enabled = NO; + break; + case SCCameraTweaksStrategyFollowABTest: + enabled = SCExperimentWithMicrophonePermissionNotificationEnabled(); + break; + default: + enabled = NO; + break; + } + }); + return enabled; +} + +SCCameraHandsFreeModeType SCCameraTweaksHandsFreeMode(void) +{ + static SCCameraHandsFreeModeType handsFreeModeType; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + SCCameraHandsFreeModeType option = SCCameraTweaksHandsFreeModeType(); + switch (option) { + case SCCameraHandsFreeModeTypeDisabled: + handsFreeModeType = 
SCCameraHandsFreeModeTypeDisabled; + break; + case SCCameraHandsFreeModeTypeMainOnly: + handsFreeModeType = SCCameraHandsFreeModeTypeMainOnly; + break; + case SCCameraHandsFreeModeTypeChatMoveCaptureButton: + handsFreeModeType = SCCameraHandsFreeModeTypeChatMoveCaptureButton; + break; + case SCCameraHandsFreeModeTypeMainAndChat: + handsFreeModeType = SCCameraHandsFreeModeTypeMainAndChat; + break; + case SCCameraHandsFreeModeTypeLeftOfCapture: + handsFreeModeType = SCCameraHandsFreeModeTypeLeftOfCapture; + break; + case SCCameraHandsFreeModeTypeABTest: + default: + handsFreeModeType = SCExperimentWithHandsFreeMode(); + break; + } + }); + return handsFreeModeType; +} + +BOOL SCCameraTweaksEnableHandsFreeXToCancel(void) +{ + static BOOL enableHandsFreeXToCancel; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + switch (SCCameraTweaksEnableHandsFreeXToCancelStrategy()) { + case SCCameraTweaksStrategyOverrideToYes: { + enableHandsFreeXToCancel = YES; + break; + } + case SCCameraTweaksStrategyOverrideToNo: { + enableHandsFreeXToCancel = NO; + break; + } + case SCCameraTweaksStrategyFollowABTest: { + enableHandsFreeXToCancel = SCExperimentWithHandsFreeXToCancel(); + break; + } + default: { + enableHandsFreeXToCancel = NO; + break; + } + } + }); + return enableHandsFreeXToCancel; +} + +BOOL SCCameraTweaksEnableShortPreviewTransitionAnimationDuration(void) +{ + static BOOL enableShortPreviewTransitionAnimationDuration; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + switch (SCCameraTweaksPreviewTransitionAnimationDurationStrategy()) { + case SCCameraTweaksStrategyOverrideToYes: { + enableShortPreviewTransitionAnimationDuration = YES; + break; + } + case SCCameraTweaksStrategyOverrideToNo: { + enableShortPreviewTransitionAnimationDuration = NO; + break; + } + case SCCameraTweaksStrategyFollowABTest: { + enableShortPreviewTransitionAnimationDuration = SCExperimentWithShortPreviewTransitionAnimationDuration(); + break; + } + default: { + 
enableShortPreviewTransitionAnimationDuration = YES; + break; + } + } + }); + return enableShortPreviewTransitionAnimationDuration; +} + +BOOL SCCameraTweaksEnablePreviewPresenterFastPreview(void) +{ + static BOOL enablePreviewPresenterFastPreview; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + switch (SCCameraTweaksPreviewPresenterFastPreviewStrategy()) { + case SCCameraTweaksStrategyOverrideToYes: { + enablePreviewPresenterFastPreview = YES; + break; + } + case SCCameraTweaksStrategyOverrideToNo: { + enablePreviewPresenterFastPreview = NO; + break; + } + case SCCameraTweaksStrategyFollowABTest: { + enablePreviewPresenterFastPreview = SCExperimentWithPreviewPresenterFastPreview(); + break; + } + default: { + enablePreviewPresenterFastPreview = NO; + break; + } + } + }); + return enablePreviewPresenterFastPreview; +} + +BOOL SCCameraTweaksEnableCaptureSharePerformer(void) +{ + static BOOL enableCaptureSharePerformer; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + switch (SCCameraTweaksEnableCaptureSharePerformerStrategy()) { + case SCCameraTweaksStrategyOverrideToYes: { + enableCaptureSharePerformer = YES; + break; + } + case SCCameraTweaksStrategyOverrideToNo: { + enableCaptureSharePerformer = NO; + break; + } + case SCCameraTweaksStrategyFollowABTest: { + enableCaptureSharePerformer = SCExperimentWithCaptureSharePerformer(); + break; + } + default: { + enableCaptureSharePerformer = NO; + break; + } + } + }); + return enableCaptureSharePerformer; +} + +static inline SCCameraTweaksStrategyType SCCameraTweaksSessionLightWeightFixStrategy(void) +{ + NSNumber *strategy = SCTweakValueWithHalt(@"Camera", @"Core Camera", @"Light-weight Session Fix", + (id) @(SCCameraTweaksStrategyFollowABTest), (@{ + @(SCCameraTweaksStrategyFollowABTest) : @"Respect A/B testing", + @(SCCameraTweaksStrategyOverrideToYes) : @"Override to YES", + @(SCCameraTweaksStrategyOverrideToNo) : @"Override to NO" + })); + return 
(SCCameraTweaksStrategyType)[strategy unsignedIntegerValue]; +} + +BOOL SCCameraTweaksSessionLightWeightFixEnabled(void) +{ + static BOOL enabled; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + switch (SCCameraTweaksSessionLightWeightFixStrategy()) { + case SCCameraTweaksStrategyOverrideToYes: + enabled = YES; + break; + case SCCameraTweaksStrategyOverrideToNo: + enabled = NO; + break; + case SCCameraTweaksStrategyFollowABTest: + enabled = SCExperimentWithSessionLightWeightFix(); + break; + default: + enabled = NO; + break; + } + }); + return enabled; +} diff --git a/UI/AVCameraViewEnums.h b/UI/AVCameraViewEnums.h new file mode 100644 index 0000000..e742792 --- /dev/null +++ b/UI/AVCameraViewEnums.h @@ -0,0 +1,47 @@ +// +// AVCameraViewEnums.h +// SCCamera +// +// Copyright © 2016 Snapchat, Inc. All rights reserved. +// + +#import + +/** + The context specifies the way in which the camera is presented to the user. + The controller can be configured a variety of ways depending on the context. 
+ */ +typedef NS_ENUM(NSUInteger, AVCameraViewControllerContext) { + AVCameraViewControllerContextMainVC = 1, + AVCameraViewControllerContextReply, + AVCameraViewControllerContextDefault = AVCameraViewControllerContextReply, + AVCameraViewControllerContextSnapAds, + AVCameraViewControllerContextAddToStory, +}; + +typedef NS_ENUM(NSInteger, AVCameraViewType) { + AVCameraViewNoReply = 0, + AVCameraViewReplyLeft, + AVCameraViewReplyRight, + AVCameraViewChat, + AVCameraViewReplyHydra, + AVCameraViewSnapAds, + AVCameraViewGalleryMadeWithLenses, + AVCameraViewSnapConnectSnapKit, + AVCameraViewSnappable +}; + +typedef NS_ENUM(NSUInteger, AVCameraViewControllerRecordingState) { + AVCameraViewControllerRecordingStateDefault, // No capture activity + AVCameraViewControllerRecordingStatePrepareRecording, // Preparing for recording with delay + AVCameraViewControllerRecordingStateInitiatedRecording, // Actively recording + AVCameraViewControllerRecordingStateTakingPicture, // Taking a still image + AVCameraViewControllerRecordingStatePictureTaken, // Picture is taken + AVCameraViewControllerRecordingStatePreview, // Preparing to present preview +}; + +typedef NS_ENUM(NSUInteger, SCCameraRecordingMethod) { + SCCameraRecordingMethodCameraButton, + SCCameraRecordingMethodVolumeButton, + SCCameraRecordingMethodLensInitiated +}; diff --git a/UI/Lenses/LensButtonZ/SCFeatureLensButtonZ.h b/UI/Lenses/LensButtonZ/SCFeatureLensButtonZ.h new file mode 100644 index 0000000..ca2b988 --- /dev/null +++ b/UI/Lenses/LensButtonZ/SCFeatureLensButtonZ.h @@ -0,0 +1,32 @@ +// +// SCFeatureLensButtonZ.h +// SCCamera +// +// Created by Anton Udovychenko on 4/24/18. 
+// + +#import "AVCameraViewEnums.h" +#import "SCFeature.h" + +#import + +@protocol SCFeatureLensButtonZ; +@class SCGrowingButton, SCLens; + +NS_ASSUME_NONNULL_BEGIN + +@protocol SCFeatureLensButtonZDelegate +- (void)featureLensZButton:(id)featureLensZButton + didPressLensButton:(SCGrowingButton *)lensButton; +- (nullable NSArray *)allLenses; +@end + +@protocol SCFeatureLensButtonZ + +@property (nonatomic, weak) id delegate; + +- (void)setLensButtonActive:(BOOL)active; + +@end + +NS_ASSUME_NONNULL_END diff --git a/UI/Lenses/LensSideButton/SCFeatureLensSideButton.h b/UI/Lenses/LensSideButton/SCFeatureLensSideButton.h new file mode 100644 index 0000000..eea180d --- /dev/null +++ b/UI/Lenses/LensSideButton/SCFeatureLensSideButton.h @@ -0,0 +1,33 @@ +// +// SCFeatureLensSideButton.h +// SCCamera +// +// Created by Anton Udovychenko on 4/12/18. +// + +#import "AVCameraViewEnums.h" +#import "SCFeature.h" + +#import + +@protocol SCFeatureLensSideButton; +@class SCGrowingButton, SCLens; + +NS_ASSUME_NONNULL_BEGIN + +@protocol SCFeatureLensSideButtonDelegate +- (void)featureLensSideButton:(id)featureLensSideButton + didPressLensButton:(SCGrowingButton *)lensButton; +- (nullable SCLens *)firstApplicableLens; +@end + +@protocol SCFeatureLensSideButton + +@property (nonatomic, weak) id delegate; + +- (void)updateLensButtonVisibility:(CGFloat)visibilityPercentage; +- (void)showLensButtonIfNeeded; + +@end + +NS_ASSUME_NONNULL_END diff --git a/UI/SCLongPressGestureRecognizer.h b/UI/SCLongPressGestureRecognizer.h new file mode 100644 index 0000000..4d0f6c1 --- /dev/null +++ b/UI/SCLongPressGestureRecognizer.h @@ -0,0 +1,24 @@ +// +// SCLongPressGestureRecognizer.h +// SCCamera +// +// Created by Pavlo Antonenko on 4/28/15. +// Copyright (c) 2015 Snapchat, Inc. All rights reserved. +// + +#import + +// gesture recognizer cancels, if user moved finger more than defined value, even if long press started, unlike 
But if user hasn't moved finger for defined time, unlimited movement is allowed. +@interface SCLongPressGestureRecognizer : UILongPressGestureRecognizer + +@property (nonatomic, assign) CGFloat allowableMovementAfterBegan; +@property (nonatomic, assign) CGFloat timeBeforeUnlimitedMovementAllowed; +@property (nonatomic, assign, readonly) CGFloat forceOfAllTouches; +@property (nonatomic, assign, readonly) CGFloat maximumPossibleForceOfAllTouches; +@property (nonatomic, strong) NSDictionary *userInfo; +@property (nonatomic, assign) BOOL failedByMovement; + +- (BOOL)isUnlimitedMovementAllowed; + +@end diff --git a/UI/SCLongPressGestureRecognizer.m b/UI/SCLongPressGestureRecognizer.m new file mode 100644 index 0000000..e633d4b --- /dev/null +++ b/UI/SCLongPressGestureRecognizer.m @@ -0,0 +1,88 @@ +// +// SCLongPressGestureRecognizer.m +// SCCamera +// +// Created by Pavlo Antonenko on 4/28/15. +// Copyright (c) 2015 Snapchat, Inc. All rights reserved. +// + +#import "SCLongPressGestureRecognizer.h" + +#import + +#import + +@implementation SCLongPressGestureRecognizer { + CGPoint _initialPoint; + CGFloat _initialTime; +} + +- (instancetype)initWithTarget:(id)target action:(SEL)action +{ + self = [super initWithTarget:target action:action]; + if (self) { + _allowableMovementAfterBegan = FLT_MAX; + _timeBeforeUnlimitedMovementAllowed = 0.0; + } + return self; +} + +- (void)reset +{ + [super reset]; + _initialPoint = CGPointZero; + _initialTime = 0; + _forceOfAllTouches = 1.0; + _maximumPossibleForceOfAllTouches = 1.0; + self.failedByMovement = NO; +} + +- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event +{ + [super touchesBegan:touches withEvent:event]; + _initialPoint = [self locationInView:self.view]; + _initialTime = CACurrentMediaTime(); + _forceOfAllTouches = 1.0; + for (UITouch *touch in touches) { + _maximumPossibleForceOfAllTouches = MAX(touch.maximumPossibleForce, _maximumPossibleForceOfAllTouches); + _forceOfAllTouches = MAX(touch.force, 
_forceOfAllTouches); + } +} + +- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event +{ + [super touchesMoved:touches withEvent:event]; + + _forceOfAllTouches = 1.0; + for (UITouch *touch in touches) { + _maximumPossibleForceOfAllTouches = MAX(touch.maximumPossibleForce, _maximumPossibleForceOfAllTouches); + _forceOfAllTouches = MAX(touch.force, _forceOfAllTouches); + } + + if (!CGPointEqualToPoint(_initialPoint, CGPointZero)) { + CGPoint currentPoint = [self locationInView:self.view]; + + CGFloat distance = hypot(_initialPoint.x - currentPoint.x, _initialPoint.y - currentPoint.y); + CGFloat timeDifference = CACurrentMediaTime() - _initialTime; + + if (distance > self.allowableMovementAfterBegan && timeDifference < self.timeBeforeUnlimitedMovementAllowed) { + SCLogGeneralInfo(@"Long press moved %.2f > %.2f after %.3f < %.3f seconds, and is cancelled", distance, + self.allowableMovementAfterBegan, timeDifference, self.timeBeforeUnlimitedMovementAllowed); + self.state = UIGestureRecognizerStateFailed; + self.failedByMovement = YES; + } + } +} + +- (void)setEnabled:(BOOL)enabled +{ + SCLogGeneralInfo(@"Setting enabled: %d for %@", enabled, self); + [super setEnabled:enabled]; +} + +- (BOOL)isUnlimitedMovementAllowed +{ + return CACurrentMediaTime() - _initialTime > self.timeBeforeUnlimitedMovementAllowed; +} + +@end diff --git a/VolumeButton/SCCameraVolumeButtonHandler.h b/VolumeButton/SCCameraVolumeButtonHandler.h new file mode 100644 index 0000000..1332019 --- /dev/null +++ b/VolumeButton/SCCameraVolumeButtonHandler.h @@ -0,0 +1,31 @@ +// +// SCCameraVolumeButtonHandler.h +// Snapchat +// +// Created by Xiaomu Wu on 2/27/15. +// Copyright (c) 2015 Snapchat, Inc. All rights reserved. 
+// + +#import + +@class SCCameraVolumeButtonHandler; + +@protocol SCCameraVolumeButtonHandlerDelegate + +- (void)volumeButtonHandlerDidBeginPressingVolumeButton:(SCCameraVolumeButtonHandler *)handler; +- (void)volumeButtonHandlerDidEndPressingVolumeButton:(SCCameraVolumeButtonHandler *)handler; + +@end + +@interface SCCameraVolumeButtonHandler : NSObject + +@property (nonatomic, weak) id delegate; + +- (void)startHandlingVolumeButtonEvents; +- (void)stopHandlingVolumeButtonEvents; +- (void)stopHandlingVolumeButtonEventsWhenPressingEnds; +- (BOOL)isHandlingVolumeButtonEvents; + +- (BOOL)isPressingVolumeButton; + +@end diff --git a/VolumeButton/SCCameraVolumeButtonHandler.m b/VolumeButton/SCCameraVolumeButtonHandler.m new file mode 100644 index 0000000..5c29c8c --- /dev/null +++ b/VolumeButton/SCCameraVolumeButtonHandler.m @@ -0,0 +1,190 @@ +// +// SCCameraVolumeButtonHandler.m +// Snapchat +// +// Created by Xiaomu Wu on 2/27/15. +// Copyright (c) 2015 Snapchat, Inc. All rights reserved. 
+// + +#import "SCCameraVolumeButtonHandler.h" + +#import +#import + +@implementation SCCameraVolumeButtonHandler { + NSString *_secretFeatureToken; + BOOL _pressingButton1; // volume down button + BOOL _pressingButton2; // volume up button + BOOL _stopsHandlingWhenPressingEnds; +} + +#pragma mark - NSObject + +- (instancetype)init +{ + self = [super init]; + if (self) { + NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter]; + UIApplication *application = [UIApplication sharedApplication]; + [notificationCenter addObserver:self + selector:@selector(_handleButton1Down:) + name:[application sc_eventNotificationName1] + object:nil]; + [notificationCenter addObserver:self + selector:@selector(_handleButton1Up:) + name:[application sc_eventNotificationName2] + object:nil]; + [notificationCenter addObserver:self + selector:@selector(_handleButton2Down:) + name:[application sc_eventNotificationName3] + object:nil]; + [notificationCenter addObserver:self + selector:@selector(_handleButton2Up:) + name:[application sc_eventNotificationName4] + object:nil]; + } + return self; +} + +- (void)dealloc +{ + if (_secretFeatureToken) { + [[UIApplication sharedApplication] sc_disableSecretFeature2:_secretFeatureToken]; + } +} + +#pragma mark - Public + +- (void)startHandlingVolumeButtonEvents +{ + _stopsHandlingWhenPressingEnds = NO; + [self _resetPressingButtons]; + if ([self isHandlingVolumeButtonEvents]) { + return; + } + SCLogGeneralInfo(@"[Volume Buttons] Start handling volume button events"); + _secretFeatureToken = [[[UIApplication sharedApplication] sc_enableSecretFeature2] copy]; +} + +- (void)stopHandlingVolumeButtonEvents +{ + if (![self isHandlingVolumeButtonEvents]) { + return; + } + SCLogGeneralInfo(@"[Volume Buttons] Stop handling volume button events"); + [[UIApplication sharedApplication] sc_disableSecretFeature2:_secretFeatureToken]; + _secretFeatureToken = nil; + _stopsHandlingWhenPressingEnds = NO; +} + +- 
(void)stopHandlingVolumeButtonEventsWhenPressingEnds +{ + if (![self isHandlingVolumeButtonEvents]) { + return; + } + if (![self isPressingVolumeButton]) { + return; + } + SCLogGeneralInfo(@"[Volume Buttons] Stop handling volume button events when pressing ends"); + _stopsHandlingWhenPressingEnds = YES; +} + +- (BOOL)isHandlingVolumeButtonEvents +{ + return (_secretFeatureToken != nil); +} + +- (BOOL)isPressingVolumeButton +{ + return _pressingButton1 || _pressingButton2; +} + +- (void)_resetPressingButtons +{ + _pressingButton1 = NO; + _pressingButton2 = NO; +} + +#pragma mark - Private + +- (void)_handleButton1Down:(NSNotification *)notification +{ + if (![self isHandlingVolumeButtonEvents]) { + SCLogGeneralInfo(@"[Volume Buttons] Volume button 1 down, not handled"); + return; + } + if (_pressingButton1) { + SCLogGeneralInfo(@"[Volume Buttons] Volume button 1 down, already down"); + return; + } + SCLogGeneralInfo(@"[Volume Buttons] Volume button 1 down"); + [self _changePressingButton:^{ + _pressingButton1 = YES; + }]; +} + +- (void)_handleButton1Up:(NSNotification *)notification +{ + if (![self isHandlingVolumeButtonEvents]) { + SCLogGeneralInfo(@"[Volume Buttons] Volume button 1 up, not handled"); + return; + } + if (!_pressingButton1) { + SCLogGeneralInfo(@"[Volume Buttons] Volume button 1 up, already up"); + return; + } + SCLogGeneralInfo(@"[Volume Buttons] Volume button 1 up"); + [self _changePressingButton:^{ + _pressingButton1 = NO; + }]; +} + +- (void)_handleButton2Down:(NSNotification *)notification +{ + if (![self isHandlingVolumeButtonEvents]) { + SCLogGeneralInfo(@"[Volume Buttons] Volume button 2 down, not handled"); + return; + } + if (_pressingButton2) { + SCLogGeneralInfo(@"[Volume Buttons] Volume button 2 down, already down"); + return; + } + SCLogGeneralInfo(@"[Volume Buttons] Volume button 2 down"); + [self _changePressingButton:^{ + _pressingButton2 = YES; + }]; +} + +- (void)_handleButton2Up:(NSNotification *)notification +{ + if (![self 
isHandlingVolumeButtonEvents]) { + SCLogGeneralInfo(@"[Volume Buttons] Volume button 2 up, not handled"); + return; + } + if (!_pressingButton2) { + SCLogGeneralInfo(@"[Volume Buttons] Volume button 2 up, already up"); + return; + } + SCLogGeneralInfo(@"[Volume Buttons] Volume button 2 up"); + [self _changePressingButton:^{ + _pressingButton2 = NO; + }]; +} + +- (void)_changePressingButton:(void (^)(void))change +{ + BOOL oldPressingVolumeButton = [self isPressingVolumeButton]; + change(); + BOOL newPressingVolumeButton = [self isPressingVolumeButton]; + + if (!oldPressingVolumeButton && newPressingVolumeButton) { + [_delegate volumeButtonHandlerDidBeginPressingVolumeButton:self]; + } else if (oldPressingVolumeButton && !newPressingVolumeButton) { + [_delegate volumeButtonHandlerDidEndPressingVolumeButton:self]; + if (_stopsHandlingWhenPressingEnds) { + [self stopHandlingVolumeButtonEvents]; + } + } +} + +@end diff --git a/Worker/SCCaptureWorker.h b/Worker/SCCaptureWorker.h new file mode 100644 index 0000000..ec92b78 --- /dev/null +++ b/Worker/SCCaptureWorker.h @@ -0,0 +1,109 @@ +// +// SCCaptureWorker.h +// Snapchat +// +// Created by Lin Jia on 10/19/17. +// +// + +#import "SCCaptureResource.h" + +#import + +#import + +/* + In general, the function of SCCapturer is to use some resources (such as SCManagedCapturerListenerAnnouncer), to do + something (such as announce an event). + + SCCaptureWorker abstract away the "do something" part of SCCapturer. It has very little internal states/resources. + + SCCaptureWorker is introduced to be shared between CaptureV1 and CaptureV2, to minimize duplication code. 
+ + */ + +@interface SCCaptureWorker : NSObject + ++ (SCCaptureResource *)generateCaptureResource; + ++ (void)setupWithCaptureResource:(SCCaptureResource *)captureResource + devicePosition:(SCManagedCaptureDevicePosition)devicePosition; + ++ (void)setupCapturePreviewLayerController; + ++ (void)startRunningWithCaptureResource:(SCCaptureResource *)captureResource + token:(SCCapturerToken *)token + completionHandler:(dispatch_block_t)completionHandler; + ++ (BOOL)stopRunningWithCaptureResource:(SCCaptureResource *)captureResource + token:(SCCapturerToken *)token + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler; + ++ (void)setupVideoPreviewLayer:(SCCaptureResource *)resource; + ++ (void)makeVideoPreviewLayer:(SCCaptureResource *)resource; + ++ (void)redoVideoPreviewLayer:(SCCaptureResource *)resource; + ++ (void)startStreaming:(SCCaptureResource *)resource; + ++ (void)setupLivenessConsistencyTimerIfForeground:(SCCaptureResource *)resource; + ++ (void)destroyLivenessConsistencyTimer:(SCCaptureResource *)resource; + ++ (void)softwareZoomWithDevice:(SCManagedCaptureDevice *)device resource:(SCCaptureResource *)resource; + ++ (void)captureStillImageWithCaptureResource:(SCCaptureResource *)captureResource + aspectRatio:(CGFloat)aspectRatio + captureSessionID:(NSString *)captureSessionID + shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo + completionHandler: + (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler + context:(NSString *)context; + ++ (void)startRecordingWithCaptureResource:(SCCaptureResource *)captureResource + outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings + audioConfiguration:(SCAudioConfiguration *)configuration + maxDuration:(NSTimeInterval)maxDuration + fileURL:(NSURL *)fileURL + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler; + ++ 
(void)stopRecordingWithCaptureResource:(SCCaptureResource *)captureResource; + ++ (void)cancelRecordingWithCaptureResource:(SCCaptureResource *)captureResource; + ++ (SCVideoCaptureSessionInfo)activeSession:(SCCaptureResource *)resource; + ++ (BOOL)canRunARSession:(SCCaptureResource *)resource; + ++ (void)turnARSessionOn:(SCCaptureResource *)resource; + ++ (void)turnARSessionOff:(SCCaptureResource *)resource; + ++ (void)clearARKitData:(SCCaptureResource *)resource; + ++ (void)updateLensesFieldOfViewTracking:(SCCaptureResource *)captureResource; + ++ (CMTime)firstWrittenAudioBufferDelay:(SCCaptureResource *)resource; + ++ (BOOL)audioQueueStarted:(SCCaptureResource *)resource; + ++ (BOOL)isLensApplied:(SCCaptureResource *)resource; + ++ (BOOL)isVideoMirrored:(SCCaptureResource *)resource; + ++ (BOOL)shouldCaptureImageFromVideoWithResource:(SCCaptureResource *)resource; + ++ (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest + completionHandler:(dispatch_block_t)completionHandler + resource:(SCCaptureResource *)resource; + ++ (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration + resource:(SCCaptureResource *)resource; + ++ (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler resource:(SCCaptureResource *)resource; + ++ (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration resource:(SCCaptureResource *)resource; + +@end diff --git a/Worker/SCCaptureWorker.m b/Worker/SCCaptureWorker.m new file mode 100644 index 0000000..fa805f9 --- /dev/null +++ b/Worker/SCCaptureWorker.m @@ -0,0 +1,1095 @@ +// +// SCCaptureWorker.m +// Snapchat +// +// Created by Lin Jia on 10/19/17. 
+// +// + +#import "SCCaptureWorker.h" + +#import "ARConfiguration+SCConfiguration.h" +#import "SCBlackCameraDetector.h" +#import "SCBlackCameraNoOutputDetector.h" +#import "SCCameraTweaks.h" +#import "SCCaptureCoreImageFaceDetector.h" +#import "SCCaptureFaceDetector.h" +#import "SCCaptureMetadataOutputDetector.h" +#import "SCCaptureSessionFixer.h" +#import "SCManagedCaptureDevice+SCManagedCapturer.h" +#import "SCManagedCaptureDeviceDefaultZoomHandler.h" +#import "SCManagedCaptureDeviceHandler.h" +#import "SCManagedCaptureDeviceLinearInterpolationZoomHandler.h" +#import "SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h" +#import "SCManagedCaptureDeviceSubjectAreaHandler.h" +#import "SCManagedCapturePreviewLayerController.h" +#import "SCManagedCaptureSession.h" +#import "SCManagedCapturer.h" +#import "SCManagedCapturerARImageCaptureProvider.h" +#import "SCManagedCapturerARSessionHandler.h" +#import "SCManagedCapturerGLViewManagerAPI.h" +#import "SCManagedCapturerLensAPIProvider.h" +#import "SCManagedCapturerLogging.h" +#import "SCManagedCapturerState.h" +#import "SCManagedCapturerStateBuilder.h" +#import "SCManagedCapturerV1.h" +#import "SCManagedDeviceCapacityAnalyzer.h" +#import "SCManagedDeviceCapacityAnalyzerHandler.h" +#import "SCManagedDroppedFramesReporter.h" +#import "SCManagedFrontFlashController.h" +#import "SCManagedStillImageCapturerHandler.h" +#import "SCManagedVideoARDataSource.h" +#import "SCManagedVideoCapturer.h" +#import "SCManagedVideoCapturerHandler.h" +#import "SCManagedVideoFileStreamer.h" +#import "SCManagedVideoScanner.h" +#import "SCManagedVideoStreamReporter.h" +#import "SCManagedVideoStreamer.h" +#import "SCMetalUtils.h" +#import "SCProcessingPipelineBuilder.h" +#import "SCVideoCaptureSessionInfo.h" + +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import + +@import ARKit; + +static const char *kSCManagedCapturerQueueLabel = "com.snapchat.managed_capturer"; +static NSTimeInterval const 
kMaxDefaultScanFrameDuration = 1. / 15; // Restrict scanning to max 15 frames per second +static NSTimeInterval const kMaxPassiveScanFrameDuration = 1.; // Restrict scanning to max 1 frame per second +static float const kScanTargetCPUUtilization = 0.5; // 50% utilization + +static NSString *const kSCManagedCapturerErrorDomain = @"kSCManagedCapturerErrorDomain"; +static NSInteger const kSCManagedCapturerRecordVideoBusy = 3001; +static NSInteger const kSCManagedCapturerCaptureStillImageBusy = 3002; + +static UIImageOrientation SCMirroredImageOrientation(UIImageOrientation orientation) +{ + switch (orientation) { + case UIImageOrientationRight: + return UIImageOrientationLeftMirrored; + case UIImageOrientationLeftMirrored: + return UIImageOrientationRight; + case UIImageOrientationUp: + return UIImageOrientationUpMirrored; + case UIImageOrientationUpMirrored: + return UIImageOrientationUp; + case UIImageOrientationDown: + return UIImageOrientationDownMirrored; + case UIImageOrientationDownMirrored: + return UIImageOrientationDown; + case UIImageOrientationLeft: + return UIImageOrientationRightMirrored; + case UIImageOrientationRightMirrored: + return UIImageOrientationLeft; + } +} + +@implementation SCCaptureWorker + ++ (SCCaptureResource *)generateCaptureResource +{ + SCCaptureResource *captureResource = [[SCCaptureResource alloc] init]; + + captureResource.queuePerformer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedCapturerQueueLabel + qualityOfService:QOS_CLASS_USER_INTERACTIVE + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCamera]; + + captureResource.announcer = [[SCManagedCapturerListenerAnnouncer alloc] init]; + captureResource.videoCapturerHandler = + [[SCManagedVideoCapturerHandler alloc] initWithCaptureResource:captureResource]; + captureResource.stillImageCapturerHandler = + [[SCManagedStillImageCapturerHandler alloc] initWithCaptureResource:captureResource]; + captureResource.deviceCapacityAnalyzerHandler = + 
[[SCManagedDeviceCapacityAnalyzerHandler alloc] initWithCaptureResource:captureResource]; + captureResource.deviceZoomHandler = ({ + SCManagedCaptureDeviceDefaultZoomHandler *handler = nil; + switch (SCCameraTweaksDeviceZoomHandlerStrategy()) { + case SCManagedCaptureDeviceDefaultZoom: + handler = [[SCManagedCaptureDeviceDefaultZoomHandler alloc] initWithCaptureResource:captureResource]; + break; + case SCManagedCaptureDeviceSavitzkyGolayFilter: + handler = [[SCManagedCaptureDeviceSavitzkyGolayZoomHandler alloc] initWithCaptureResource:captureResource]; + break; + case SCManagedCaptureDeviceLinearInterpolation: + handler = + [[SCManagedCaptureDeviceLinearInterpolationZoomHandler alloc] initWithCaptureResource:captureResource]; + break; + } + handler; + }); + captureResource.captureDeviceHandler = + [[SCManagedCaptureDeviceHandler alloc] initWithCaptureResource:captureResource]; + captureResource.arSessionHandler = + [[SCManagedCapturerARSessionHandler alloc] initWithCaptureResource:captureResource]; + + captureResource.tokenSet = [NSMutableSet new]; + captureResource.allowsZoom = YES; + captureResource.debugInfoDict = [[NSMutableDictionary alloc] init]; + captureResource.notificationRegistered = NO; + return captureResource; +} + ++ (void)setupWithCaptureResource:(SCCaptureResource *)captureResource + devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + SCTraceODPCompatibleStart(2); + SCAssert(captureResource.status == SCManagedCapturerStatusUnknown, @"The status should be unknown"); + captureResource.device = [SCManagedCaptureDevice deviceWithPosition:devicePosition]; + if (!captureResource.device) { + // Always prefer front camera over back camera + if ([SCManagedCaptureDevice front]) { + captureResource.device = [SCManagedCaptureDevice front]; + devicePosition = SCManagedCaptureDevicePositionFront; + } else { + captureResource.device = [SCManagedCaptureDevice back]; + devicePosition = SCManagedCaptureDevicePositionBack; + } + } + // Initial state + 
SCLogCapturerInfo(@"Init state with devicePosition:%lu, zoomFactor:%f, flashSupported:%d, " + @"torchSupported:%d, flashActive:%d, torchActive:%d", + (unsigned long)devicePosition, captureResource.device.zoomFactor, + captureResource.device.isFlashSupported, captureResource.device.isTorchSupported, + captureResource.device.flashActive, captureResource.device.torchActive); + captureResource.state = [[SCManagedCapturerState alloc] initWithIsRunning:NO + isNightModeActive:NO + isPortraitModeActive:NO + lowLightCondition:NO + adjustingExposure:NO + devicePosition:devicePosition + zoomFactor:captureResource.device.zoomFactor + flashSupported:captureResource.device.isFlashSupported + torchSupported:captureResource.device.isTorchSupported + flashActive:captureResource.device.flashActive + torchActive:captureResource.device.torchActive + lensesActive:NO + arSessionActive:NO + liveVideoStreaming:NO + lensProcessorReady:NO]; + + [self configLensesProcessorWithCaptureResource:captureResource]; + [self configARSessionWithCaptureResource:captureResource]; + [self configCaptureDeviceHandlerWithCaptureResource:captureResource]; + [self configAVCaptureSessionWithCaptureResource:captureResource]; + [self configImageCapturerWithCaptureResource:captureResource]; + [self configDeviceCapacityAnalyzerWithCaptureResource:captureResource]; + [self configVideoDataSourceWithCaptureResource:captureResource devicePosition:devicePosition]; + [self configVideoScannerWithCaptureResource:captureResource]; + [self configVideoCapturerWithCaptureResource:captureResource]; + + if (!SCIsSimulator()) { + // We don't want it enabled for simulator + [self configBlackCameraDetectorWithCaptureResource:captureResource]; + } + + if (SCCameraTweaksEnableFaceDetectionFocus(captureResource.state.devicePosition)) { + [self configureCaptureFaceDetectorWithCaptureResource:captureResource]; + } +} + ++ (void)setupCapturePreviewLayerController +{ + SCAssert([[SCQueuePerformer mainQueuePerformer] isCurrentPerformer], 
             @"");
    [[SCManagedCapturePreviewLayerController sharedInstance] setupPreviewLayer];
}

// Marks lensProcessorReady on the capturer state, then asks the injected
// lensAPIProvider to build the lens processing core for this resource.
+ (void)configLensesProcessorWithCaptureResource:(SCCaptureResource *)captureResource
{
    SCManagedCapturerStateBuilder *stateBuilder =
        [SCManagedCapturerStateBuilder withManagedCapturerState:captureResource.state];
    [stateBuilder setLensProcessorReady:YES];
    captureResource.state = [stateBuilder build];

    captureResource.lensProcessingCore = [captureResource.lensAPIProvider lensAPIForCaptureResource:captureResource];
}

// Creates the ARSession and the AR-backed still image capturer. iOS 11+ only;
// on earlier systems this is a no-op.
+ (void)configARSessionWithCaptureResource:(SCCaptureResource *)captureResource
{
    if (@available(iOS 11.0, *)) {
        captureResource.arSession = [[ARSession alloc] init];

        captureResource.arImageCapturer =
            [captureResource.arImageCaptureProvider arImageCapturerWith:captureResource.queuePerformer
                                                     lensProcessingCore:captureResource.lensProcessingCore];
    }
}

// (Re)creates the managed AVCaptureSession and re-registers KVO on its
// `running` flag so session state changes are observed.
// NOTE(review): the whole #if body is skipped on the simulator, so
// managedSession stays nil there and the trailing calls message nil (no-ops).
+ (void)configAVCaptureSessionWithCaptureResource:(SCCaptureResource *)captureResource
{
#if !TARGET_IPHONE_SIMULATOR
    captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0;
    // lazily initialize _captureResource.kvoController on background thread
    if (!captureResource.kvoController) {
        captureResource.kvoController = [[FBKVOController alloc] initWithObserver:[SCManagedCapturerV1 sharedInstance]];
    }
    // Stop observing the old session (if any) before swapping in a fresh one,
    // otherwise the dangling observation would fire on a dead session.
    [captureResource.kvoController unobserve:captureResource.managedSession.avSession];
    captureResource.managedSession =
        [[SCManagedCaptureSession alloc] initWithBlackCameraDetector:captureResource.blackCameraDetector];
    [captureResource.kvoController observe:captureResource.managedSession.avSession
                                   keyPath:@keypath(captureResource.managedSession.avSession, running)
                                   options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
                                    action:captureResource.handleAVSessionStatusChange];
#endif

    // We manage the audio session ourselves; don't let AVFoundation reconfigure it.
    [captureResource.managedSession.avSession setAutomaticallyConfiguresApplicationAudioSession:NO];
    [captureResource.device
        setDeviceAsInput:captureResource.managedSession.avSession];
}

// Builds the device capacity analyzer on the video data source's performer and
// wires its listeners (handler, still image capturer) plus focus tracking on
// the capture device.
+ (void)configDeviceCapacityAnalyzerWithCaptureResource:(SCCaptureResource *)captureResource
{
    captureResource.deviceCapacityAnalyzer =
        [[SCManagedDeviceCapacityAnalyzer alloc] initWithPerformer:captureResource.videoDataSource.performer];
    [captureResource.deviceCapacityAnalyzer addListener:captureResource.deviceCapacityAnalyzerHandler];
    // Low-light detection only matters on hardware where night mode exists.
    [captureResource.deviceCapacityAnalyzer setLowLightConditionEnabled:[SCManagedCaptureDevice isNightModeSupported]];
    [captureResource.deviceCapacityAnalyzer addListener:captureResource.stillImageCapturer];
    [captureResource.deviceCapacityAnalyzer setAsFocusListenerForDevice:captureResource.device];
}

// Chooses the video data source: a file streamer when fileInputDecider says to
// replay from disk, otherwise a live AVCaptureSession-backed streamer (with the
// ARSession attached on iOS 11+, and a portrait-mode pipeline when active).
+ (void)configVideoDataSourceWithCaptureResource:(SCCaptureResource *)captureResource
                                  devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    if (captureResource.fileInputDecider.shouldProcessFileInput) {
        captureResource.videoDataSource =
            [[SCManagedVideoFileStreamer alloc] initWithPlaybackForURL:captureResource.fileInputDecider.fileURL];
        [captureResource.lensProcessingCore setLensesActive:YES
                                           videoOrientation:captureResource.videoDataSource.videoOrientation
                                              filterFactory:nil];

        runOnMainThreadAsynchronously(^{
            [captureResource.videoPreviewGLViewManager prepareViewIfNecessary];
        });
    } else {
        if (@available(iOS 11.0, *)) {
            captureResource.videoDataSource =
                [[SCManagedVideoStreamer alloc] initWithSession:captureResource.managedSession.avSession
                                                      arSession:captureResource.arSession
                                                 devicePosition:devicePosition];
            [captureResource.videoDataSource addListener:captureResource.arImageCapturer];
            if (captureResource.state.isPortraitModeActive) {
                // Portrait mode needs depth data plus a dedicated processing pipeline.
                [captureResource.videoDataSource setDepthCaptureEnabled:YES];

                SCProcessingPipelineBuilder *processingPipelineBuilder = [[SCProcessingPipelineBuilder alloc] init];
                processingPipelineBuilder.portraitModeEnabled = YES;
                SCProcessingPipeline *pipeline =
[processingPipelineBuilder build]; + [captureResource.videoDataSource addProcessingPipeline:pipeline]; + } + } else { + captureResource.videoDataSource = + [[SCManagedVideoStreamer alloc] initWithSession:captureResource.managedSession.avSession + devicePosition:devicePosition]; + } + } + + [captureResource.videoDataSource addListener:captureResource.lensProcessingCore.capturerListener]; + [captureResource.videoDataSource addListener:captureResource.deviceCapacityAnalyzer]; + [captureResource.videoDataSource addListener:captureResource.stillImageCapturer]; + + if (SCIsMasterBuild()) { + captureResource.videoStreamReporter = [[SCManagedVideoStreamReporter alloc] init]; + [captureResource.videoDataSource addListener:captureResource.videoStreamReporter]; + } +} + ++ (void)configVideoScannerWithCaptureResource:(SCCaptureResource *)captureResource +{ + // When initializing video scanner: + // Restrict default scanning to max 15 frames per second. + // Restrict passive scanning to max 1 frame per second. + // Give CPU time to rest. 
    captureResource.videoScanner =
        [[SCManagedVideoScanner alloc] initWithMaxFrameDefaultDuration:kMaxDefaultScanFrameDuration
                                           maxFramePassiveDuration:kMaxPassiveScanFrameDuration
                                                         restCycle:1 - kScanTargetCPUUtilization];
    [captureResource.videoDataSource addListener:captureResource.videoScanner];
    [captureResource.deviceCapacityAnalyzer addListener:captureResource.videoScanner];
}

// Creates the video (recording) capturer — optionally on the shared capture
// performer per tweak — and hooks it to the lens pipeline and its delegate.
+ (void)configVideoCapturerWithCaptureResource:(SCCaptureResource *)captureResource
{
    if (SCCameraTweaksEnableCaptureSharePerformer()) {
        captureResource.videoCapturer =
            [[SCManagedVideoCapturer alloc] initWithQueuePerformer:captureResource.queuePerformer];
    } else {
        captureResource.videoCapturer = [[SCManagedVideoCapturer alloc] init];
    }

    [captureResource.videoCapturer addListener:captureResource.lensProcessingCore.capturerListener];
    captureResource.videoCapturer.delegate = captureResource.videoCapturerHandler;
}

// Factory-creates the still image capturer appropriate for this resource.
+ (void)configImageCapturerWithCaptureResource:(SCCaptureResource *)captureResource
{
    captureResource.stillImageCapturer = [SCManagedStillImageCapturer capturerWithCaptureResource:captureResource];
}

// Registers `token`, starts the AVCaptureSession (unless backgrounded), starts
// streaming, registers runtime-error notifications, and announces the state
// change on the main thread. Runs on the managed capture queue.
+ (void)startRunningWithCaptureResource:(SCCaptureResource *)captureResource
                                  token:(SCCapturerToken *)token
                      completionHandler:(dispatch_block_t)completionHandler
{
    [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsOpen
                                     uniqueId:@""
                                     stepName:@"startOpenCameraOnManagedCaptureQueue"];
    SCTraceSignal(@"Add token %@ to set %@", token, captureResource.tokenSet);
    [captureResource.tokenSet addObject:token];
    if (captureResource.appInBackground) {
        SCTraceSignal(@"Will skip startRunning on AVCaptureSession because we are in background");
    }
    SCTraceStartSection("start session")
    {
        if (!SCDeviceSupportsMetal()) {
            SCCAssert(captureResource.videoPreviewLayer, @"videoPreviewLayer should be created already");
            if (captureResource.status == SCManagedCapturerStatusReady) {
                // Need to wrap this into a CATransaction because
startRunning will change + // AVCaptureVideoPreviewLayer, + // therefore, + // without atomic update, will cause layer inconsistency. + [CATransaction begin]; + [CATransaction setDisableActions:YES]; + captureResource.videoPreviewLayer.session = captureResource.managedSession.avSession; + if (!captureResource.appInBackground) { + SCGhostToSnappableSignalCameraStart(); + [captureResource.managedSession startRunning]; + } + [self setupVideoPreviewLayer:captureResource]; + [CATransaction commit]; + SCLogCapturerInfo(@"[_captureResource.avSession startRunning] finished. token: %@", token); + } + // In case we don't use sample buffer, then we need to fake that we know when the first frame receieved. + SCGhostToSnappableSignalDidReceiveFirstPreviewFrame(); + } else { + if (captureResource.status == SCManagedCapturerStatusReady) { + if (!captureResource.appInBackground) { + SCGhostToSnappableSignalCameraStart(); + [captureResource.managedSession startRunning]; + SCLogCapturerInfo( + @"[_captureResource.avSession startRunning] finished using sample buffer. token: %@", token); + } + } + } + } + SCTraceEndSection(); + SCTraceStartSection("start streaming") + { + // Do the start streaming after start running, but make sure we start it + // regardless if the status is ready or + // not. + [self startStreaming:captureResource]; + } + SCTraceEndSection(); + + if (!captureResource.notificationRegistered) { + captureResource.notificationRegistered = YES; + + [captureResource.deviceSubjectAreaHandler startObserving]; + + [[NSNotificationCenter defaultCenter] addObserver:[SCManagedCapturerV1 sharedInstance] + selector:captureResource.sessionRuntimeError + name:AVCaptureSessionRuntimeErrorNotification + object:nil]; + } + + if (captureResource.status == SCManagedCapturerStatusReady) { + // Schedule a timer to check the running state and fix any inconsistency. 
+ runOnMainThreadAsynchronously(^{ + [self setupLivenessConsistencyTimerIfForeground:captureResource]; + }); + SCLogCapturerInfo(@"Setting isRunning to YES. token: %@", token); + captureResource.state = + [[[SCManagedCapturerStateBuilder withManagedCapturerState:captureResource.state] setIsRunning:YES] build]; + captureResource.status = SCManagedCapturerStatusRunning; + } + [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsOpen + uniqueId:@"" + stepName:@"endOpenCameraOnManagedCaptureQueue"]; + [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraMetricsOpen uniqueId:@"" parameters:nil]; + + SCManagedCapturerState *state = [captureResource.state copy]; + SCTraceResumeToken resumeToken = SCTraceCapture(); + runOnMainThreadAsynchronously(^{ + SCTraceResume(resumeToken); + [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state]; + [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didStartRunning:state]; + [[SCBatteryLogger shared] logManagedCapturerDidStartRunning]; + if (completionHandler) { + completionHandler(); + } + if (!SCDeviceSupportsMetal()) { + // To approximate this did render timer, it is not accurate. + SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime()); + } + }); +} + ++ (BOOL)stopRunningWithCaptureResource:(SCCaptureResource *)captureResource + token:(SCCapturerToken *)token + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler +{ + SCTraceODPCompatibleStart(2); + SCAssert([captureResource.queuePerformer isCurrentPerformer], @""); + BOOL videoPreviewLayerChanged = NO; + SCAssert([captureResource.tokenSet containsObject:token], + @"It should be a valid token that is issued by startRunning method."); + SCTraceSignal(@"Remove token %@, from set %@", token, captureResource.tokenSet); + SCLogCapturerInfo(@"Stop running. 
token:%@ tokenSet:%@", token, captureResource.tokenSet); + [captureResource.tokenSet removeObject:token]; + BOOL succeed = (captureResource.tokenSet.count == 0); + if (succeed && captureResource.status == SCManagedCapturerStatusRunning) { + captureResource.status = SCManagedCapturerStatusReady; + if (@available(iOS 11.0, *)) { + [captureResource.arSession pause]; + } + [captureResource.managedSession stopRunning]; + if (!SCDeviceSupportsMetal()) { + [captureResource.videoDataSource stopStreaming]; + [self redoVideoPreviewLayer:captureResource]; + videoPreviewLayerChanged = YES; + } else { + [captureResource.videoDataSource pauseStreaming]; + } + + if (captureResource.state.devicePosition == SCManagedCaptureDevicePositionBackDualCamera) { + [[SCManagedCapturerV1 sharedInstance] setDevicePositionAsynchronously:SCManagedCaptureDevicePositionBack + completionHandler:nil + context:SCCapturerContext]; + } + + // We always disable lenses and hide _captureResource.videoPreviewGLView when app goes into + // the background + // thus there is no need to clean up anything. + // _captureResource.videoPreviewGLView will be shown again to the user only when the frame + // will be processed by the lenses + // processor + + // Remove the liveness timer which checks the health of the running state + runOnMainThreadAsynchronously(^{ + [self destroyLivenessConsistencyTimer:captureResource]; + }); + SCLogCapturerInfo(@"Setting isRunning to NO. 
removed token: %@", token);
        captureResource.state =
            [[[SCManagedCapturerStateBuilder withManagedCapturerState:captureResource.state] setIsRunning:NO] build];

        captureResource.notificationRegistered = NO;

        [captureResource.deviceSubjectAreaHandler stopObserving];

        [[NSNotificationCenter defaultCenter] removeObserver:[SCManagedCapturerV1 sharedInstance]
                                                        name:AVCaptureSessionRuntimeErrorNotification
                                                      object:nil];

        [captureResource.arSessionHandler stopObserving];
    }

    // Snapshot the state before hopping to the main thread so the announcement
    // sees a consistent value even if the capture queue mutates it afterwards.
    SCManagedCapturerState *state = [captureResource.state copy];
    AVCaptureVideoPreviewLayer *videoPreviewLayer = videoPreviewLayerChanged ? captureResource.videoPreviewLayer : nil;
    runOnMainThreadAsynchronously(^{
        if (succeed) {
            [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];
            [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didStopRunning:state];
            [[SCBatteryLogger shared] logManagedCapturerDidStopRunning];
            if (videoPreviewLayerChanged) {
                [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                didChangeVideoPreviewLayer:videoPreviewLayer];
            }
        }
        if (completionHandler) {
            completionHandler(succeed);
        }
    });

    return succeed;
}

// Configures the existing preview layer: portrait orientation (when the
// connection supports it), aspect-fill gravity, and hidden unless the managed
// session is actually running. Callable from the capture queue or main queue.
+ (void)setupVideoPreviewLayer:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCAssert([resource.queuePerformer isCurrentPerformer] || [[SCQueuePerformer mainQueuePerformer] isCurrentPerformer],
             @"");
    if ([resource.videoPreviewLayer.connection isVideoOrientationSupported]) {
        resource.videoPreviewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
    }
    resource.videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    resource.videoPreviewLayer.hidden = !resource.managedSession.isRunning;

    SCLogCapturerInfo(@"Setup video preview layer with connect.enabled:%d, hidden:%d",
                      resource.videoPreviewLayer.connection.enabled, resource.videoPreviewLayer.hidden);
}

+
(void)makeVideoPreviewLayer:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    // This can be called either from current queue or from main queue.
    SCAssert([resource.queuePerformer isCurrentPerformer] || [[SCQueuePerformer mainQueuePerformer] isCurrentPerformer],
             @"");
#if !TARGET_IPHONE_SIMULATOR
    SCAssert(resource.managedSession.avSession, @"session shouldn't be nil");
#endif
    // Need to wrap this in a transaction; otherwise this is happening off the
    // main thread and the layer won't be laid out correctly.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    // Since _captureResource.avSession is always created / recreated on this private queue, and
    // videoPreviewLayer.session,
    // if not touched by anyone else, is also set on this private queue, it should
    // be safe to do this
    // If-clause check.
    resource.videoPreviewLayer = [AVCaptureVideoPreviewLayer new];
    SCAssert(resource.videoPreviewLayer, @"_captureResource.videoPreviewLayer shouldn't be nil");
    [self setupVideoPreviewLayer:resource];
    // Re-apply software zoom so the fresh layer matches the device's zoom state.
    if (resource.device.softwareZoom && resource.device.zoomFactor != 1) {
        [self softwareZoomWithDevice:resource.device resource:resource];
    }
    [CATransaction commit];
    SCLogCapturerInfo(@"Created AVCaptureVideoPreviewLayer:%@", resource.videoPreviewLayer);
}

// Tears down the current preview layer on the main thread and immediately
// builds a replacement so the next startRunning doesn't pay the setup cost.
+ (void)redoVideoPreviewLayer:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"redo video preview layer");
    AVCaptureVideoPreviewLayer *videoPreviewLayer = resource.videoPreviewLayer;
    resource.videoPreviewLayer = nil;
    // This will do dispatch_sync on the main thread, since mainQueuePerformer
    // is reentrant, it should be fine
    // on iOS 7.
    [[SCQueuePerformer mainQueuePerformer] performAndWait:^{
        // Hide and remove the session when stop the video preview layer at main
        // thread.
        // It seems that when we nil out the session, it will cause some relayout
        // on iOS 9
        // and trigger an assertion.
        videoPreviewLayer.hidden = YES;
        videoPreviewLayer.session = nil;
        // We setup the video preview layer immediately after destroy it so
        // that when we start running again, we don't need to pay the setup
        // cost.
        [self makeVideoPreviewLayer:resource];
    }];
}

// Bumps the streaming sequence number and starts the video data source.
+ (void)startStreaming:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    ++resource.streamingSequence;
    SCLogCapturerInfo(@"Start streaming. streamingSequence:%lu", (unsigned long)resource.streamingSequence);
    [resource.videoDataSource startStreaming];
}

// Installs the 1s liveness timer that checks running-state health. No-op if a
// timer already exists or the app is backgrounded. Main thread only.
// NOTE: the repeating NSTimer retains its target; the target here is the
// SCManagedCapturerV1 singleton, and the timer is invalidated explicitly via
// destroyLivenessConsistencyTimer:, so no leak results.
+ (void)setupLivenessConsistencyTimerIfForeground:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCAssertMainThread();
    if (resource.livenessTimer) {
        // If we have the liveness timer already, don't need to set it up.
        return;
    }
    // Check if the application state is in background now, if so, we don't need
    // to setup liveness timer
    if ([UIApplication sharedApplication].applicationState != UIApplicationStateBackground) {
        resource.livenessTimer = [NSTimer scheduledTimerWithTimeInterval:1
                                                                  target:[SCManagedCapturerV1 sharedInstance]
                                                                selector:resource.livenessConsistency
                                                                userInfo:nil
                                                                 repeats:YES];
    }
}

// Invalidates and clears the liveness timer. Main thread only.
+ (void)destroyLivenessConsistencyTimer:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCAssertMainThread();
    [resource.livenessTimer invalidate];
    resource.livenessTimer = nil;
}

// Delegates software (crop-based) zoom to the resource's zoom handler.
+ (void)softwareZoomWithDevice:(SCManagedCaptureDevice *)device resource:(SCCaptureResource *)resource
{
    [resource.deviceZoomHandler softwareZoomWithDevice:device];
}

// Entry point for still capture: fails fast with a "busy" error when another
// capture is in flight, otherwise flags stillImageCapturing and forwards to the
// private async worker.
+ (void)captureStillImageWithCaptureResource:(SCCaptureResource *)captureResource
                                 aspectRatio:(CGFloat)aspectRatio
                            captureSessionID:(NSString *)captureSessionID
                      shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
                           completionHandler:
                               (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                                     context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    if
(captureResource.stillImageCapturing) { + SCLogCapturerWarning(@"Another still image is capturing. aspectRatio:%f", aspectRatio); + if (completionHandler) { + SCManagedCapturerState *state = [captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + completionHandler(nil, nil, [NSError errorWithDomain:kSCManagedCapturerErrorDomain + code:kSCManagedCapturerCaptureStillImageBusy + userInfo:nil], + state); + }); + } + } else { + captureResource.stillImageCapturing = YES; + [SCCaptureWorker _captureStillImageAsynchronouslyWithCaptureResource:captureResource + aspectRatio:aspectRatio + captureSessionID:captureSessionID + shouldCaptureFromVideo:shouldCaptureFromVideo + completionHandler:completionHandler]; + } +} + ++ (void)_captureStillImageAsynchronouslyWithCaptureResource:(SCCaptureResource *)captureResource + aspectRatio:(CGFloat)aspectRatio + captureSessionID:(NSString *)captureSessionID + shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo + completionHandler: + (sc_managed_capturer_capture_still_image_completion_handler_t) + completionHandler +{ + SCTraceODPCompatibleStart(2); + SCAssert([captureResource.queuePerformer isCurrentPerformer], @""); + SCAssert(completionHandler, @"completionHandler cannot be nil"); + + SCManagedCapturerState *state = [captureResource.state copy]; + SCLogCapturerInfo(@"Capturing still image. aspectRatio:%f state:%@", aspectRatio, state); + // If when we start capturing, the video streamer is not running yet, start + // running it. + [SCCaptureWorker startStreaming:captureResource]; + SCManagedStillImageCapturer *stillImageCapturer = captureResource.stillImageCapturer; + if (@available(iOS 11.0, *)) { + if (state.arSessionActive) { + stillImageCapturer = captureResource.arImageCapturer; + } + } + dispatch_block_t stillImageCaptureHandler = ^{ + SCCAssert(captureResource.stillImageCapturer, @"stillImageCapturer should be available"); + float zoomFactor = captureResource.device.softwareZoom ? 
captureResource.device.zoomFactor : 1; + [stillImageCapturer + captureStillImageWithAspectRatio:aspectRatio + atZoomFactor:zoomFactor + fieldOfView:captureResource.device.fieldOfView + state:state + captureSessionID:captureSessionID + shouldCaptureFromVideo:shouldCaptureFromVideo + completionHandler:^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error) { + SCTraceStart(); + // We are done here, turn off front flash if needed, + // this is dispatched in + // SCManagedCapturer's private queue + if (captureResource.state.flashActive && !captureResource.state.flashSupported && + captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) { + captureResource.frontFlashController.flashActive = NO; + } + if (state.devicePosition == SCManagedCaptureDevicePositionFront) { + fullScreenImage = [UIImage + imageWithCGImage:fullScreenImage.CGImage + scale:1.0 + orientation:SCMirroredImageOrientation(fullScreenImage.imageOrientation)]; + } + captureResource.stillImageCapturing = NO; + + runOnMainThreadAsynchronously(^{ + completionHandler(fullScreenImage, metadata, error, state); + }); + }]; + }; + if (state.flashActive && !captureResource.state.flashSupported && + state.devicePosition == SCManagedCaptureDevicePositionFront) { + captureResource.frontFlashController.flashActive = YES; + // Do the first capture only after 0.175 seconds so that the front flash is + // already available + [captureResource.queuePerformer perform:stillImageCaptureHandler after:0.175]; + } else { + stillImageCaptureHandler(); + } +} + ++ (void)startRecordingWithCaptureResource:(SCCaptureResource *)captureResource + outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings + audioConfiguration:(SCAudioConfiguration *)configuration + maxDuration:(NSTimeInterval)maxDuration + fileURL:(NSURL *)fileURL + captureSessionID:(NSString *)captureSessionID + completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler +{ + 
SCTraceODPCompatibleStart(2); + if (captureResource.videoRecording) { + if (completionHandler) { + runOnMainThreadAsynchronously(^{ + completionHandler(SCVideoCaptureSessionInfoMake(kCMTimeInvalid, kCMTimeInvalid, 0), + [NSError errorWithDomain:kSCManagedCapturerErrorDomain + code:kSCManagedCapturerRecordVideoBusy + userInfo:nil]); + }); + } + // Don't start recording session + SCLogCapturerInfo(@"*** Tries to start multiple video recording session ***"); + return; + } + + // Fix: https://jira.sc-corp.net/browse/CCAM-12322 + // Fire this notification in recording state to let PlaybackSession stop + runOnMainThreadAsynchronously(^{ + [[NSNotificationCenter defaultCenter] postNotificationName:kSCImageProcessVideoPlaybackStopNotification + object:[SCManagedCapturer sharedInstance] + userInfo:nil]; + }); + + SCLogCapturerInfo(@"Start recording. OutputSettigns:%@, maxDuration:%f, fileURL:%@", outputSettings, maxDuration, + fileURL); + // Turns on torch temporarily if we have Flash active + if (!captureResource.state.torchActive) { + if (captureResource.state.flashActive) { + [captureResource.device setTorchActive:YES]; + + if (captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) { + captureResource.frontFlashController.torchActive = YES; + } + } + } + + if (captureResource.device.softwareZoom) { + captureResource.device.zoomFactor = 1; + [SCCaptureWorker softwareZoomWithDevice:captureResource.device resource:captureResource]; + } + + // Lock focus on both front and back camera if not using ARKit + if (!captureResource.state.arSessionActive) { + SCManagedCaptureDevice *front = [SCManagedCaptureDevice front]; + SCManagedCaptureDevice *back = [SCManagedCaptureDevice back]; + [front setRecording:YES]; + [back setRecording:YES]; + } + // Start streaming if we haven't already + [self startStreaming:captureResource]; + // Remove other listeners from video streamer + [captureResource.videoDataSource removeListener:captureResource.deviceCapacityAnalyzer]; 
+ // If lenses is not actually applied, we should open sticky video tweak + + BOOL isLensApplied = [SCCaptureWorker isLensApplied:captureResource]; + [captureResource.videoDataSource setKeepLateFrames:!isLensApplied]; + SCLogCapturerInfo(@"Start recording. isLensApplied:%d", isLensApplied); + + [captureResource.videoDataSource addListener:captureResource.videoCapturer]; + captureResource.videoRecording = YES; + if (captureResource.state.lensesActive) { + BOOL modifySource = captureResource.videoRecording || captureResource.state.liveVideoStreaming; + [captureResource.lensProcessingCore setModifySource:modifySource]; + } + + if (captureResource.fileInputDecider.shouldProcessFileInput) { + [captureResource.videoDataSource stopStreaming]; + } + // The max video duration, we will stop process sample buffer if the current + // time is larger than max video duration. + // 0.5 so that we have a bit of lean way on video recording initialization, and + // when NSTimer stucked in normal + // recording sessions, we don't suck too much as breaking expections on how long + // it is recorded. 
    // maxDuration gets 0.5s of slack for recorder startup latency and NSTimer
    // jitter (see comment above).
    SCVideoCaptureSessionInfo sessionInfo = [captureResource.videoCapturer
        startRecordingAsynchronouslyWithOutputSettings:outputSettings
                                    audioConfiguration:configuration
                                           maxDuration:maxDuration + 0.5
                                                 toURL:fileURL
                                          deviceFormat:captureResource.device.activeFormat
                                           orientation:AVCaptureVideoOrientationLandscapeLeft
                                      captureSessionID:captureSessionID];

    if (completionHandler) {
        runOnMainThreadAsynchronously(^{
            completionHandler(sessionInfo, nil);
        });
    }

    // Track dropped frames for the duration of this recording.
    captureResource.droppedFramesReporter = [SCManagedDroppedFramesReporter new];
    [captureResource.videoDataSource addListener:captureResource.droppedFramesReporter];
    [[SCManagedCapturerV1 sharedInstance] addListener:captureResource.droppedFramesReporter];
}

// Stops recording, reports dropped-frame stats on the data source's performer,
// and re-disables keepLateFrames so preview recency recovers.
+ (void)stopRecordingWithCaptureResource:(SCCaptureResource *)captureResource
{
    SCTraceStart();
    SCLogCapturerInfo(@"Stop recording asynchronously");
    [captureResource.videoCapturer stopRecordingAsynchronously];

    // Keep a local strong reference: the block below must report even after the
    // resource's property is cleared.
    [captureResource.videoDataSource removeListener:captureResource.droppedFramesReporter];
    SCManagedDroppedFramesReporter *droppedFramesReporter = captureResource.droppedFramesReporter;
    [[SCManagedCapturerV1 sharedInstance] removeListener:captureResource.droppedFramesReporter];
    captureResource.droppedFramesReporter = nil;

    [captureResource.videoDataSource.performer perform:^{
        // call on the same performer as that of managedVideoDataSource: didOutputSampleBuffer: devicePosition:
        BOOL keepLateFrames = [captureResource.videoDataSource getKeepLateFrames];
        [droppedFramesReporter reportWithKeepLateFrames:keepLateFrames
                                          lensesApplied:[SCCaptureWorker isLensApplied:captureResource]];
        // Disable keepLateFrames once stop recording to make sure the recentness of preview
        [captureResource.videoDataSource setKeepLateFrames:NO];
    }];
}

// Cancels an in-flight recording and restores the listener wiring that
// startRecording removed.
+ (void)cancelRecordingWithCaptureResource:(SCCaptureResource *)captureResource
{
    SCTraceStart();
    SCLogCapturerInfo(@"Cancel recording asynchronously");
[captureResource.videoDataSource removeListener:captureResource.droppedFramesReporter]; + [[SCManagedCapturerV1 sharedInstance] removeListener:captureResource.droppedFramesReporter]; + captureResource.droppedFramesReporter = nil; + + [captureResource.videoDataSource removeListener:captureResource.videoCapturer]; + // Add back other listeners to video streamer + [captureResource.videoDataSource addListener:captureResource.deviceCapacityAnalyzer]; + [captureResource.videoCapturer cancelRecordingAsynchronously]; + + captureResource.droppedFramesReporter = nil; +} + ++ (SCVideoCaptureSessionInfo)activeSession:(SCCaptureResource *)resource +{ + if (resource.videoCapturer == nil) { + SCLogCapturerWarning( + @"Trying to retrieve SCVideoCaptureSessionInfo while _captureResource.videoCapturer is nil."); + return SCVideoCaptureSessionInfoMake(kCMTimeInvalid, kCMTimeInvalid, 0); + } else { + return resource.videoCapturer.activeSession; + } +} + ++ (BOOL)canRunARSession:(SCCaptureResource *)resource +{ + SCTraceODPCompatibleStart(2); + if (@available(iOS 11.0, *)) { + return resource.state.lensesActive && + [ARConfiguration sc_supportedForDevicePosition:resource.state.devicePosition]; + } + return NO; +} + ++ (void)turnARSessionOff:(SCCaptureResource *)resource +{ + SCTraceODPCompatibleStart(2); + SCAssert([resource.queuePerformer isCurrentPerformer], @""); + if (@available(iOS 11.0, *)) { + SC_GUARD_ELSE_RETURN(resource.state.arSessionActive); + SCLogCapturerInfo(@"Stopping ARSession"); + + [resource.arSessionHandler stopARSessionRunning]; + [resource.managedSession performConfiguration:^{ + [resource.device updateActiveFormatWithSession:resource.managedSession.avSession]; + }]; + [resource.managedSession startRunning]; + resource.state = + [[[SCManagedCapturerStateBuilder withManagedCapturerState:resource.state] setArSessionActive:NO] build]; + [resource.lensProcessingCore setShouldProcessARFrames:resource.state.arSessionActive]; + [self clearARKitData:resource]; + [self 
updateLensesFieldOfViewTracking:resource];
        runOnMainThreadAsynchronously(^{
            [resource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:resource.state];
            [resource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                       didChangeARSessionActive:resource.state];
            // AR zoom is unsupported; re-enable normal zoom now that AR is off.
            [[SCManagedCapturerV1 sharedInstance] unlockZoomWithContext:SCCapturerContext];
        });
    } // was "};" — dropped the stray semicolon after the @available block
}

// Drops any retained ARKit frame (and depth data when built with face support)
// so stale buffers don't outlive the session they came from.
+ (void)clearARKitData:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    if (@available(iOS 11.0, *)) {
        if ([resource.videoDataSource conformsToProtocol:@protocol(SCManagedVideoARDataSource)]) {
            // Protocol qualification restored: dot-syntax on the properties
            // below requires a typed receiver, not bare `id`.
            id<SCManagedVideoARDataSource> dataSource = (id<SCManagedVideoARDataSource>)resource.videoDataSource;
            dataSource.currentFrame = nil;
#ifdef SC_USE_ARKIT_FACE
            dataSource.lastDepthData = nil;
#endif
        }
    }
}

// Stops the AVCaptureSession and spins up the ARSession with a fresh tracking
// configuration. Guarded by canRunARSession:. Must run on the capture queue.
+ (void)turnARSessionOn:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCAssert([resource.queuePerformer isCurrentPerformer], @"");
    if (@available(iOS 11.0, *)) {
        SC_GUARD_ELSE_RETURN(!resource.state.arSessionActive);
        SC_GUARD_ELSE_RETURN([self canRunARSession:resource]);
        SCLogCapturerInfo(@"Starting ARSession");
        resource.state =
            [[[SCManagedCapturerStateBuilder withManagedCapturerState:resource.state] setArSessionActive:YES] build];
        // Make sure we commit any configurations that may be in flight.
        [resource.videoDataSource commitConfiguration];

        runOnMainThreadAsynchronously(^{
            [resource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:resource.state];
            [resource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                       didChangeARSessionActive:resource.state];
            // Zooming on an ARSession breaks stuff in super weird ways.
            [[SCManagedCapturerV1 sharedInstance] lockZoomWithContext:SCCapturerContext];
        });
        [self clearARKitData:resource];
        [resource.managedSession stopRunning];
        [resource.arSession
            runWithConfiguration:[ARConfiguration sc_configurationForDevicePosition:resource.state.devicePosition]
                         options:(ARSessionRunOptionResetTracking | ARSessionRunOptionRemoveExistingAnchors)];

        [resource.lensProcessingCore setShouldProcessARFrames:resource.state.arSessionActive];
        [self updateLensesFieldOfViewTracking:resource];
    }
}

// Wires the black camera detector: the session fixer acts as the no-output
// detector's delegate, and the detector listens to the video data source.
+ (void)configBlackCameraDetectorWithCaptureResource:(SCCaptureResource *)captureResource
{
    captureResource.captureSessionFixer = [[SCCaptureSessionFixer alloc] init];
    captureResource.blackCameraDetector.blackCameraNoOutputDetector.delegate = captureResource.captureSessionFixer;
    [captureResource.videoDataSource addListener:captureResource.blackCameraDetector.blackCameraNoOutputDetector];
}

// Picks the face detector implementation per tweak: CIDetector-based (which
// must observe video frames) or AVCaptureMetadataOutput-based.
+ (void)configureCaptureFaceDetectorWithCaptureResource:(SCCaptureResource *)captureResource
{
    if (SCCameraFaceFocusDetectionMethod() == SCCameraFaceFocusDetectionMethodTypeCIDetector) {
        SCCaptureCoreImageFaceDetector *detector =
            [[SCCaptureCoreImageFaceDetector alloc] initWithCaptureResource:captureResource];
        captureResource.captureFaceDetector = detector;
        [captureResource.videoDataSource addListener:detector];
    } else {
        captureResource.captureFaceDetector =
            [[SCCaptureMetadataOutputDetector alloc] initWithCaptureResource:captureResource];
    }
}

// Points the capture device's delegate at the resource's handler.
+ (void)configCaptureDeviceHandlerWithCaptureResource:(SCCaptureResource *)captureResource
{
    captureResource.device.delegate = captureResource.captureDeviceHandler;
}

// Re-routes lens field-of-view tracking to the active source: the AR data
// source when ARKit is running, otherwise the capture device.
+ (void)updateLensesFieldOfViewTracking:(SCCaptureResource *)captureResource
{
    // 1.
reset observers + [captureResource.lensProcessingCore removeFieldOfViewListener]; + + if (@available(iOS 11.0, *)) { + if (captureResource.state.arSessionActive && + [captureResource.videoDataSource conformsToProtocol:@protocol(SCManagedVideoARDataSource)]) { + // 2. handle ARKit case + id arDataSource = + (id)captureResource.videoDataSource; + float fieldOfView = arDataSource.fieldOfView; + if (fieldOfView > 0) { + // 2.5 there will be no field of view + [captureResource.lensProcessingCore setFieldOfView:fieldOfView]; + } + [captureResource.lensProcessingCore setAsFieldOfViewListenerForARDataSource:arDataSource]; + return; + } + } + // 3. fallback to regular device field of view + float fieldOfView = captureResource.device.fieldOfView; + [captureResource.lensProcessingCore setFieldOfView:fieldOfView]; + [captureResource.lensProcessingCore setAsFieldOfViewListenerForDevice:captureResource.device]; +} + ++ (CMTime)firstWrittenAudioBufferDelay:(SCCaptureResource *)resource +{ + return resource.videoCapturer.firstWrittenAudioBufferDelay; +} + ++ (BOOL)audioQueueStarted:(SCCaptureResource *)resource +{ + return resource.videoCapturer.audioQueueStarted; +} + ++ (BOOL)isLensApplied:(SCCaptureResource *)resource +{ + return resource.state.lensesActive && resource.lensProcessingCore.isLensApplied; +} + ++ (BOOL)isVideoMirrored:(SCCaptureResource *)resource +{ + if ([resource.videoDataSource respondsToSelector:@selector(isVideoMirrored)]) { + return [resource.videoDataSource isVideoMirrored]; + } else { + // Default is NO. 
+ return NO; + } +} + ++ (BOOL)shouldCaptureImageFromVideoWithResource:(SCCaptureResource *)resource +{ + SCTraceODPCompatibleStart(2); + BOOL isIphone5Series = [SCDeviceName isSimilarToIphone5orNewer] && ![SCDeviceName isSimilarToIphone6orNewer]; + return isIphone5Series && !resource.state.flashActive && ![SCCaptureWorker isLensApplied:resource]; +} + ++ (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest + completionHandler:(dispatch_block_t)completionHandler + resource:(SCCaptureResource *)resource +{ + SCTraceODPCompatibleStart(2); + if (resource.state.isPortraitModeActive) { + SCTraceODPCompatibleStart(2); + [resource.queuePerformer perform:^{ + SCTraceStart(); + if (resource.device.isConnected) { + if (resource.device.softwareZoom) { + CGPoint adjustedPoint = CGPointMake((pointOfInterest.x - 0.5) / resource.device.softwareZoom + 0.5, + (pointOfInterest.y - 0.5) / resource.device.softwareZoom + 0.5); + // Fix for the zooming factor + [resource.videoDataSource setPortraitModePointOfInterest:adjustedPoint]; + if (resource.state.arSessionActive) { + if (@available(ios 11.0, *)) { + [resource.arImageCapturer setPortraitModePointOfInterest:adjustedPoint]; + } + } else { + [resource.stillImageCapturer setPortraitModePointOfInterest:adjustedPoint]; + } + } else { + [resource.videoDataSource setPortraitModePointOfInterest:pointOfInterest]; + if (resource.state.arSessionActive) { + if (@available(ios 11.0, *)) { + [resource.arImageCapturer setPortraitModePointOfInterest:pointOfInterest]; + } + } else { + [resource.stillImageCapturer setPortraitModePointOfInterest:pointOfInterest]; + } + } + } + if (completionHandler) { + runOnMainThreadAsynchronously(completionHandler); + } + }]; + } +} + ++ (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration + resource:(SCCaptureResource *)resource +{ + SCAssertPerformer(resource.queuePerformer); + [resource.videoCapturer 
prepareForRecordingWithAudioConfiguration:configuration]; +} + ++ (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler resource:(SCCaptureResource *)resource +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Stop scan"); + [resource.videoScanner stopScanAsynchronously]; + if (completionHandler) { + runOnMainThreadAsynchronously(completionHandler); + } +} + ++ (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration resource:(SCCaptureResource *)resource +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Start scan. ScanConfiguration:%@", configuration); + [SCCaptureWorker startStreaming:resource]; + [resource.videoScanner startScanAsynchronouslyWithScanConfiguration:configuration]; +} +@end