Add files via upload

Khaled Alshehri 2018-08-08 02:27:52 +03:00 committed by GitHub
parent 99f9f2a76f
commit db9d4071ce
98 changed files with 12005 additions and 0 deletions


@ -0,0 +1,147 @@
//
// SCCaptureDeviceResolver.m
// Snapchat
//
// Created by Lin Jia on 11/8/17.
//
//
#import "SCCaptureDeviceResolver.h"
#import "SCCameraTweaks.h"
#import <SCBase/SCAvailability.h>
#import <SCFoundation/SCAssertWrapper.h>
@interface SCCaptureDeviceResolver () {
AVCaptureDeviceDiscoverySession *_discoverySession;
}
@end
@implementation SCCaptureDeviceResolver
+ (instancetype)sharedInstance
{
static SCCaptureDeviceResolver *resolver;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
resolver = [[SCCaptureDeviceResolver alloc] init];
});
return resolver;
}
- (instancetype)init
{
self = [super init];
if (self) {
NSMutableArray *deviceTypes = [[NSMutableArray alloc] init];
[deviceTypes addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera];
if (SC_AT_LEAST_IOS_10_2) {
[deviceTypes addObject:AVCaptureDeviceTypeBuiltInDualCamera];
}
// TODO: we should KVO _discoverySession.devices.
_discoverySession =
[AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes
mediaType:AVMediaTypeVideo
position:AVCaptureDevicePositionUnspecified];
}
return self;
}
- (AVCaptureDevice *)findAVCaptureDevice:(AVCaptureDevicePosition)position
{
SCAssert(position == AVCaptureDevicePositionFront || position == AVCaptureDevicePositionBack, @"");
AVCaptureDevice *captureDevice;
if (position == AVCaptureDevicePositionFront) {
captureDevice = [self _pickBestFrontCamera:[_discoverySession.devices copy]];
} else if (position == AVCaptureDevicePositionBack) {
captureDevice = [self _pickBestBackCamera:[_discoverySession.devices copy]];
}
if (captureDevice) {
return captureDevice;
}
if (SC_AT_LEAST_IOS_10_2 && SCCameraTweaksEnableDualCamera()) {
captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInDualCamera
mediaType:AVMediaTypeVideo
position:position];
if (captureDevice) {
return captureDevice;
}
}
    // If we reach this point, the discovery session did not find a device, so we keep searching.
captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera
mediaType:AVMediaTypeVideo
position:position];
if (captureDevice) {
return captureDevice;
}
#if !TARGET_IPHONE_SIMULATOR
// We do not return nil at the beginning of the function for the simulator, so that simulators running different iOS
// versions can verify that our camera device API access is correct.
SCAssertFail(@"No camera is found.");
#endif
return nil;
}
- (AVCaptureDevice *)_pickBestFrontCamera:(NSArray<AVCaptureDevice *> *)devices
{
for (AVCaptureDevice *device in devices) {
if (device.position == AVCaptureDevicePositionFront) {
return device;
}
}
return nil;
}
- (AVCaptureDevice *)_pickBestBackCamera:(NSArray<AVCaptureDevice *> *)devices
{
    // Look for the dual camera first if needed. If the dual camera is not found, fall back to the wide-angle camera.
if (SC_AT_LEAST_IOS_10_2 && SCCameraTweaksEnableDualCamera()) {
for (AVCaptureDevice *device in devices) {
if (device.position == AVCaptureDevicePositionBack &&
device.deviceType == AVCaptureDeviceTypeBuiltInDualCamera) {
return device;
}
}
}
for (AVCaptureDevice *device in devices) {
if (device.position == AVCaptureDevicePositionBack &&
device.deviceType == AVCaptureDeviceTypeBuiltInWideAngleCamera) {
return device;
}
}
return nil;
}
- (AVCaptureDevice *)findDualCamera
{
if (SC_AT_LEAST_IOS_10_2) {
for (AVCaptureDevice *device in [_discoverySession.devices copy]) {
if (device.position == AVCaptureDevicePositionBack &&
device.deviceType == AVCaptureDeviceTypeBuiltInDualCamera) {
return device;
}
}
}
AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInDualCamera
mediaType:AVMediaTypeVideo
position:AVCaptureDevicePositionBack];
if (captureDevice) {
return captureDevice;
}
#if !TARGET_IPHONE_SIMULATOR
// We do not return nil at the beginning of the function for the simulator, so that simulators running different iOS
// versions can verify that our camera device API access is correct.
SCAssertFail(@"No camera is found.");
#endif
return nil;
}
@end
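
A minimal usage sketch (not part of this commit) for the resolver above; it asks for the back-facing device and relies on the fallback chain shown in -findAVCaptureDevice:.

// Sketch only: resolve the back-facing camera and log what was found.
#import "SCCaptureDeviceResolver.h"

static void SCResolveBackCameraExample(void)
{
    AVCaptureDevice *device =
        [[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionBack];
    if (device) {
        NSLog(@"Resolved back camera: %@", device.localizedName);
    }
}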


@ -0,0 +1,43 @@
//
// SCCaptureFaceDetectionParser.h
// Snapchat
//
// Created by Jiyang Zhu on 3/13/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
// This class offers methods to parse face bounds from raw data, e.g., AVMetadataObject, CIFeature.
#import <SCBase/SCMacros.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreImage/CoreImage.h>
@interface SCCaptureFaceDetectionParser : NSObject
SC_INIT_AND_NEW_UNAVAILABLE;
- (instancetype)initWithFaceBoundsAreaThreshold:(CGFloat)minimumArea;
/**
Parse face bounds from AVMetadataObject.
@param metadataObjects An array of AVMetadataObject.
 @return A dictionary keyed by faceID (NSNumber), whose values are face bounds (CGRect wrapped in NSValue).
*/
- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromMetadataObjects:
(NSArray<__kindof AVMetadataObject *> *)metadataObjects;
/**
Parse face bounds from CIFeature.
@param features An array of CIFeature.
 @param imageSize Size of the image that the features were detected in.
 @param imageOrientation Orientation of the image.
 @return A dictionary keyed by faceID (NSNumber), whose values are face bounds (CGRect wrapped in NSValue).
*/
- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromCIFeatures:(NSArray<__kindof CIFeature *> *)features
withImageSize:(CGSize)imageSize
imageOrientation:
(CGImagePropertyOrientation)imageOrientation;
@end
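
A short sketch (not from this commit) of driving the parser from an AVCaptureMetadataOutputObjectsDelegate callback; the 0.01 minimum-area threshold and the metadataObjects variable are illustrative assumptions.

// Sketch: parse normalized face bounds from the metadata objects delivered by AVCaptureMetadataOutput.
SCCaptureFaceDetectionParser *parser =
    [[SCCaptureFaceDetectionParser alloc] initWithFaceBoundsAreaThreshold:0.01]; // illustrative threshold
NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =
    [parser parseFaceBoundsByFaceIDFromMetadataObjects:metadataObjects];
[faceBoundsByFaceID enumerateKeysAndObjectsUsingBlock:^(NSNumber *faceID, NSValue *bounds, BOOL *stop) {
    NSLog(@"Face %@ -> %@", faceID, NSStringFromCGRect(bounds.CGRectValue));
}];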


@ -0,0 +1,94 @@
//
// SCCaptureFaceDetectionParser.m
// Snapchat
//
// Created by Jiyang Zhu on 3/13/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCCaptureFaceDetectionParser.h"
#import <SCFoundation/NSArray+Helpers.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@implementation SCCaptureFaceDetectionParser {
CGFloat _minimumArea;
}
- (instancetype)initWithFaceBoundsAreaThreshold:(CGFloat)minimumArea
{
self = [super init];
if (self) {
_minimumArea = minimumArea;
}
return self;
}
- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromMetadataObjects:
(NSArray<__kindof AVMetadataObject *> *)metadataObjects
{
SCTraceODPCompatibleStart(2);
NSMutableArray *faceObjects = [NSMutableArray array];
[metadataObjects
enumerateObjectsUsingBlock:^(__kindof AVMetadataObject *_Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) {
if ([obj isKindOfClass:[AVMetadataFaceObject class]]) {
[faceObjects addObject:obj];
}
}];
SC_GUARD_ELSE_RETURN_VALUE(faceObjects.count > 0, nil);
NSMutableDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =
[NSMutableDictionary dictionaryWithCapacity:faceObjects.count];
for (AVMetadataFaceObject *faceObject in faceObjects) {
CGRect bounds = faceObject.bounds;
if (CGRectGetWidth(bounds) * CGRectGetHeight(bounds) >= _minimumArea) {
[faceBoundsByFaceID setObject:[NSValue valueWithCGRect:bounds] forKey:@(faceObject.faceID)];
}
}
return faceBoundsByFaceID;
}
- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromCIFeatures:(NSArray<__kindof CIFeature *> *)features
withImageSize:(CGSize)imageSize
imageOrientation:
(CGImagePropertyOrientation)imageOrientation
{
SCTraceODPCompatibleStart(2);
NSArray<CIFaceFeature *> *faceFeatures = [features filteredArrayUsingBlock:^BOOL(id _Nonnull evaluatedObject) {
return [evaluatedObject isKindOfClass:[CIFaceFeature class]];
}];
SC_GUARD_ELSE_RETURN_VALUE(faceFeatures.count > 0, nil);
NSMutableDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =
[NSMutableDictionary dictionaryWithCapacity:faceFeatures.count];
CGFloat width = imageSize.width;
CGFloat height = imageSize.height;
SCLogGeneralInfo(@"Face feature count:%d", faceFeatures.count);
for (CIFaceFeature *faceFeature in faceFeatures) {
SCLogGeneralInfo(@"Face feature: hasTrackingID:%d, bounds:%@", faceFeature.hasTrackingID,
NSStringFromCGRect(faceFeature.bounds));
if (faceFeature.hasTrackingID) {
CGRect transferredBounds;
            // Somehow the detected bounds for the back camera are mirrored.
if (imageOrientation == kCGImagePropertyOrientationRight) {
transferredBounds = CGRectMake(
CGRectGetMinX(faceFeature.bounds) / width, 1 - CGRectGetMaxY(faceFeature.bounds) / height,
CGRectGetWidth(faceFeature.bounds) / width, CGRectGetHeight(faceFeature.bounds) / height);
} else {
transferredBounds = CGRectMake(
CGRectGetMinX(faceFeature.bounds) / width, CGRectGetMinY(faceFeature.bounds) / height,
CGRectGetWidth(faceFeature.bounds) / width, CGRectGetHeight(faceFeature.bounds) / height);
}
if (CGRectGetWidth(transferredBounds) * CGRectGetHeight(transferredBounds) >= _minimumArea) {
[faceBoundsByFaceID setObject:[NSValue valueWithCGRect:transferredBounds]
forKey:@(faceFeature.trackingID)];
}
}
}
return faceBoundsByFaceID;
}
@end


@ -0,0 +1,31 @@
//
// SCCaptureFaceDetector.h
// Snapchat
//
// Created by Jiyang Zhu on 3/27/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
// This protocol declares properties and methods that are used for face detectors.
#import <Foundation/Foundation.h>
@class SCCaptureResource;
@class SCQueuePerformer;
@class SCCaptureFaceDetectorTrigger;
@class SCCaptureFaceDetectionParser;
@protocol SCCaptureFaceDetector <NSObject>
@property (nonatomic, strong, readonly) SCCaptureFaceDetectorTrigger *trigger;
@property (nonatomic, strong, readonly) SCCaptureFaceDetectionParser *parser;
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;
- (SCQueuePerformer *)detectionPerformer;
- (void)startDetection;
- (void)stopDetection;
@end
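
For reference, a minimal sketch (not part of this commit) of starting a conforming detector from its own performer; it assumes SCQueuePerformer's -performImmediatelyIfCurrentPerformer:, which SCCaptureFaceDetectorTrigger in this commit also uses.

// Sketch: hop onto the detector's queue before toggling detection.
static void SCStartFaceDetectionExample(id<SCCaptureFaceDetector> detector)
{
    [[detector detectionPerformer] performImmediatelyIfCurrentPerformer:^{
        [detector startDetection];
    }];
}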


@ -0,0 +1,22 @@
//
// SCCaptureFaceDetectorTrigger.h
// Snapchat
//
// Created by Jiyang Zhu on 3/22/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
// This class is used to control when SCCaptureFaceDetector should start and stop.
#import <SCBase/SCMacros.h>
#import <Foundation/Foundation.h>
@protocol SCCaptureFaceDetector;
@interface SCCaptureFaceDetectorTrigger : NSObject
SC_INIT_AND_NEW_UNAVAILABLE;
- (instancetype)initWithDetector:(id<SCCaptureFaceDetector>)detector;
@end


@ -0,0 +1,97 @@
//
// SCCaptureFaceDetectorTrigger.m
// Snapchat
//
// Created by Jiyang Zhu on 3/22/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCCaptureFaceDetectorTrigger.h"
#import "SCCaptureFaceDetector.h"
#import <SCFoundation/SCAppLifecycle.h>
#import <SCFoundation/SCIdleMonitor.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTaskManager.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@interface SCCaptureFaceDetectorTrigger () {
id<SCCaptureFaceDetector> __weak _detector;
}
@end
@implementation SCCaptureFaceDetectorTrigger
- (instancetype)initWithDetector:(id<SCCaptureFaceDetector>)detector
{
self = [super init];
if (self) {
_detector = detector;
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(_applicationDidBecomeActive)
name:kSCPostponedUIApplicationDidBecomeActiveNotification
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(_applicationWillResignActive)
name:UIApplicationWillResignActiveNotification
object:nil];
}
return self;
}
#pragma mark - Internal Methods
- (void)_applicationWillResignActive
{
SCTraceODPCompatibleStart(2);
[self _stopDetection];
}
- (void)_applicationDidBecomeActive
{
SCTraceODPCompatibleStart(2);
[self _waitUntilAppStartCompleteToStartDetection];
}
- (void)_waitUntilAppStartCompleteToStartDetection
{
SCTraceODPCompatibleStart(2);
@weakify(self);
if (SCExperimentWithWaitUntilIdleReplacement()) {
[[SCTaskManager sharedManager] addTaskToRunWhenAppIdle:"SCCaptureFaceDetectorTrigger.startDetection"
performer:[_detector detectionPerformer]
block:^{
@strongify(self);
SC_GUARD_ELSE_RETURN(self);
[self _startDetection];
}];
} else {
[[SCIdleMonitor sharedInstance] waitUntilIdleForTag:"SCCaptureFaceDetectorTrigger.startDetection"
callbackQueue:[_detector detectionPerformer].queue
block:^{
@strongify(self);
SC_GUARD_ELSE_RETURN(self);
[self _startDetection];
}];
}
}
- (void)_startDetection
{
SCTraceODPCompatibleStart(2);
[[_detector detectionPerformer] performImmediatelyIfCurrentPerformer:^{
[_detector startDetection];
}];
}
- (void)_stopDetection
{
SCTraceODPCompatibleStart(2);
[[_detector detectionPerformer] performImmediatelyIfCurrentPerformer:^{
[_detector stopDetection];
}];
}
@end


@ -0,0 +1,23 @@
//
// SCCaptureMetadataObjectParser.h
// Snapchat
//
// Created by Jiyang Zhu on 3/13/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
// This class offers methods to parse AVMetadataObject instances.
#import <AVFoundation/AVFoundation.h>
@interface SCCaptureMetadataObjectParser : NSObject
/**
Parse face bounds from AVMetadataObject.
@param metadataObjects An array of AVMetadataObject.
 @return A dictionary keyed by faceID (NSNumber), whose values are face bounds (CGRect wrapped in NSValue).
*/
- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromMetadataObjects:
(NSArray<__kindof AVMetadataObject *> *)metadataObjects;
@end
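
A brief sketch (not from this commit); unlike SCCaptureFaceDetectionParser above, this parser applies no minimum-area filtering, and metadataObjects is assumed to come from an AVCaptureMetadataOutputObjectsDelegate callback.

// Sketch: map every detected AVMetadataFaceObject to its bounds, keyed by faceID.
SCCaptureMetadataObjectParser *parser = [[SCCaptureMetadataObjectParser alloc] init];
NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =
    [parser parseFaceBoundsByFaceIDFromMetadataObjects:metadataObjects];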


@ -0,0 +1,38 @@
//
// SCCaptureMetadataObjectParser.m
// Snapchat
//
// Created by Jiyang Zhu on 3/13/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCCaptureMetadataObjectParser.h"
#import <SCBase/SCMacros.h>
@import UIKit;
@implementation SCCaptureMetadataObjectParser
- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromMetadataObjects:
(NSArray<__kindof AVMetadataObject *> *)metadataObjects
{
NSMutableArray *faceObjects = [NSMutableArray array];
[metadataObjects
enumerateObjectsUsingBlock:^(__kindof AVMetadataObject *_Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) {
if ([obj isKindOfClass:[AVMetadataFaceObject class]]) {
[faceObjects addObject:obj];
}
}];
SC_GUARD_ELSE_RETURN_VALUE(faceObjects.count > 0, nil);
NSMutableDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =
[NSMutableDictionary dictionaryWithCapacity:faceObjects.count];
for (AVMetadataFaceObject *faceObject in faceObjects) {
[faceBoundsByFaceID setObject:[NSValue valueWithCGRect:faceObject.bounds] forKey:@(faceObject.faceID)];
}
return faceBoundsByFaceID;
}
@end


@ -0,0 +1,19 @@
//
// SCCaptureMetadataOutputDetector.h
// Snapchat
//
// Created by Jiyang Zhu on 12/21/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
// This class is intended to detect faces in the camera. It receives AVMetadataFaceObjects and announces the bounds
// and faceIDs.
#import "SCCaptureFaceDetector.h"
#import <SCBase/SCMacros.h>
@interface SCCaptureMetadataOutputDetector : NSObject <SCCaptureFaceDetector>
SC_INIT_AND_NEW_UNAVAILABLE;
@end
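
A construction sketch (not part of this commit), assuming a fully populated SCCaptureResource; per the implementation that follows, the detector adds its AVCaptureMetadataOutput to the session and creates its own SCCaptureFaceDetectorTrigger, so no further calls are needed.

// Sketch: the trigger created inside -initWithCaptureResource: starts and stops detection automatically.
SCCaptureMetadataOutputDetector *detector =
    [[SCCaptureMetadataOutputDetector alloc] initWithCaptureResource:captureResource];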


@ -0,0 +1,175 @@
//
// SCCaptureMetadataOutputDetector.m
// Snapchat
//
// Created by Jiyang Zhu on 12/21/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCCaptureMetadataOutputDetector.h"
#import "SCCameraTweaks.h"
#import "SCCaptureFaceDetectionParser.h"
#import "SCCaptureFaceDetectorTrigger.h"
#import "SCCaptureResource.h"
#import "SCManagedCaptureSession.h"
#import "SCManagedCapturer.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/SCTraceODPCompatible.h>
#import <SCFoundation/SCZeroDependencyExperiments.h>
#import <SCFoundation/UIImage+CVPixelBufferRef.h>
#define SCLogCaptureMetaDetectorInfo(fmt, ...) \
SCLogCoreCameraInfo(@"[SCCaptureMetadataOutputDetector] " fmt, ##__VA_ARGS__)
#define SCLogCaptureMetaDetectorWarning(fmt, ...) \
SCLogCoreCameraWarning(@"[SCCaptureMetadataOutputDetector] " fmt, ##__VA_ARGS__)
#define SCLogCaptureMetaDetectorError(fmt, ...) \
SCLogCoreCameraError(@"[SCCaptureMetadataOutputDetector] " fmt, ##__VA_ARGS__)
static char *const kSCCaptureMetadataOutputDetectorProcessQueue =
"com.snapchat.capture-metadata-output-detector-process";
static const NSInteger kDefaultNumberOfSequentialFramesWithFaces = -1; // -1 means no sequential frames with faces.
@interface SCCaptureMetadataOutputDetector () <AVCaptureMetadataOutputObjectsDelegate>
@end
@implementation SCCaptureMetadataOutputDetector {
BOOL _isDetecting;
AVCaptureMetadataOutput *_metadataOutput;
SCCaptureResource *_captureResource;
SCCaptureFaceDetectionParser *_parser;
NSInteger _numberOfSequentialFramesWithFaces;
NSUInteger _detectionFrequency;
SCQueuePerformer *_callbackPerformer;
SCQueuePerformer *_metadataProcessPerformer;
SCCaptureFaceDetectorTrigger *_trigger;
}
@synthesize trigger = _trigger;
@synthesize parser = _parser;
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
SCTraceODPCompatibleStart(2);
self = [super init];
if (self) {
SCAssert(captureResource, @"SCCaptureResource should not be nil");
SCAssert(captureResource.managedSession.avSession, @"AVCaptureSession should not be nil");
SCAssert(captureResource.queuePerformer, @"SCQueuePerformer should not be nil");
_metadataOutput = [AVCaptureMetadataOutput new];
_callbackPerformer = captureResource.queuePerformer;
_captureResource = captureResource;
_detectionFrequency = SCExperimentWithFaceDetectionFrequency();
_parser = [[SCCaptureFaceDetectionParser alloc]
initWithFaceBoundsAreaThreshold:pow(SCCameraFaceFocusMinFaceSize(), 2)];
_metadataProcessPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCCaptureMetadataOutputDetectorProcessQueue
qualityOfService:QOS_CLASS_DEFAULT
queueType:DISPATCH_QUEUE_SERIAL
context:SCQueuePerformerContextCamera];
if ([self _initDetection]) {
_trigger = [[SCCaptureFaceDetectorTrigger alloc] initWithDetector:self];
}
}
return self;
}
- (AVCaptureSession *)_captureSession
{
    // _captureResource.managedSession.avSession may change, so we don't retain any specific AVCaptureSession.
return _captureResource.managedSession.avSession;
}
- (BOOL)_initDetection
{
BOOL success = NO;
if ([[self _captureSession] canAddOutput:_metadataOutput]) {
[[self _captureSession] addOutput:_metadataOutput];
if ([_metadataOutput.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeFace]) {
_numberOfSequentialFramesWithFaces = kDefaultNumberOfSequentialFramesWithFaces;
_metadataOutput.metadataObjectTypes = @[ AVMetadataObjectTypeFace ];
success = YES;
SCLogCaptureMetaDetectorInfo(@"AVMetadataObjectTypeFace detection successfully enabled.");
} else {
[[self _captureSession] removeOutput:_metadataOutput];
success = NO;
SCLogCaptureMetaDetectorError(@"AVMetadataObjectTypeFace is not available for "
@"AVMetadataOutput[%@]",
_metadataOutput);
}
} else {
success = NO;
SCLogCaptureMetaDetectorError(@"AVCaptureSession[%@] cannot add AVMetadataOutput[%@] as an output",
[self _captureSession], _metadataOutput);
}
return success;
}
- (void)startDetection
{
SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -startDetection in an invalid queue.");
SC_GUARD_ELSE_RETURN(!_isDetecting);
[_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{
[_metadataOutput setMetadataObjectsDelegate:self queue:_metadataProcessPerformer.queue];
_isDetecting = YES;
SCLogCaptureMetaDetectorInfo(@"AVMetadataObjectTypeFace detection successfully enabled.");
}];
}
- (void)stopDetection
{
SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -stopDetection in an invalid queue.");
SC_GUARD_ELSE_RETURN(_isDetecting);
[_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{
[_metadataOutput setMetadataObjectsDelegate:nil queue:NULL];
_isDetecting = NO;
SCLogCaptureMetaDetectorInfo(@"AVMetadataObjectTypeFace detection successfully disabled.");
}];
}
- (SCQueuePerformer *)detectionPerformer
{
return _captureResource.queuePerformer;
}
#pragma mark - AVCaptureMetadataOutputObjectsDelegate
- (void)captureOutput:(AVCaptureOutput *)output
didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects
fromConnection:(AVCaptureConnection *)connection
{
SCTraceODPCompatibleStart(2);
BOOL shouldNotify = NO;
if (metadataObjects.count == 0 &&
_numberOfSequentialFramesWithFaces !=
kDefaultNumberOfSequentialFramesWithFaces) { // There were faces detected before, but there is no face right
// now, so send out the notification.
_numberOfSequentialFramesWithFaces = kDefaultNumberOfSequentialFramesWithFaces;
shouldNotify = YES;
} else if (metadataObjects.count > 0) {
_numberOfSequentialFramesWithFaces++;
shouldNotify = (_numberOfSequentialFramesWithFaces % _detectionFrequency == 0);
}
SC_GUARD_ELSE_RETURN(shouldNotify);
NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =
[_parser parseFaceBoundsByFaceIDFromMetadataObjects:metadataObjects];
[_callbackPerformer perform:^{
[_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
didDetectFaceBounds:faceBoundsByFaceID];
}];
}
@end


@ -0,0 +1,225 @@
//
// SCManagedCapturer.h
// Snapchat
//
// Created by Liu Liu on 4/20/15.
// Copyright (c) 2015 Liu Liu. All rights reserved.
//
#import "SCCaptureCommon.h"
#import "SCSnapCreationTriggers.h"
#import <SCAudio/SCAudioConfiguration.h>
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#define SCCapturerContext [NSString sc_stringWithFormat:@"%s/%d", __FUNCTION__, __LINE__]
@class SCBlackCameraDetector;
@protocol SCManagedCapturerListener
, SCManagedCapturerLensAPI, SCDeviceMotionProvider, SCFileInputDecider, SCManagedCapturerARImageCaptureProvider,
SCManagedCapturerGLViewManagerAPI, SCManagedCapturerLensAPIProvider, SCManagedCapturerLSAComponentTrackerAPI,
SCManagedCapturePreviewLayerControllerDelegate;
@protocol SCCapturer <NSObject>
@property (nonatomic, readonly) SCBlackCameraDetector *blackCameraDetector;
/**
 * Returns the id<SCManagedCapturerLensAPI> for the current capturer.
*/
- (id<SCManagedCapturerLensAPI>)lensProcessingCore;
- (CMTime)firstWrittenAudioBufferDelay;
- (BOOL)audioQueueStarted;
- (BOOL)isLensApplied;
- (BOOL)isVideoMirrored;
- (SCVideoCaptureSessionInfo)activeSession;
#pragma mark - Outside resources
- (void)setBlackCameraDetector:(SCBlackCameraDetector *)blackCameraDetector
deviceMotionProvider:(id<SCDeviceMotionProvider>)deviceMotionProvider
fileInputDecider:(id<SCFileInputDecider>)fileInputDecider
arImageCaptureProvider:(id<SCManagedCapturerARImageCaptureProvider>)arImageCaptureProvider
glviewManager:(id<SCManagedCapturerGLViewManagerAPI>)glViewManager
lensAPIProvider:(id<SCManagedCapturerLensAPIProvider>)lensAPIProvider
lsaComponentTracker:(id<SCManagedCapturerLSAComponentTrackerAPI>)lsaComponentTracker
managedCapturerPreviewLayerControllerDelegate:
(id<SCManagedCapturePreviewLayerControllerDelegate>)previewLayerControllerDelegate;
#pragma mark - Setup, Start & Stop
// setupWithDevicePositionAsynchronously is called on the main thread, executes off the main thread, and runs exactly once
- (void)setupWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
/**
 * Important: Remember to call stopRunningAsynchronously to stop the capture session. Dismissing the view is not enough.
 * @param context identifies the call site; passing in the class name of the call site is generally suggested.
 * Currently it is only used for debugging purposes; in other words, the capture session will work without it.
*/
- (SCCapturerToken *)startRunningAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)stopRunningAsynchronously:(SCCapturerToken *)token
completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
context:(NSString *)context;
- (void)stopRunningAsynchronously:(SCCapturerToken *)token
completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
after:(NSTimeInterval)delay
context:(NSString *)context;
- (void)startStreamingAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)addSampleBufferDisplayController:(id<SCManagedSampleBufferDisplayController>)sampleBufferDisplayController
context:(NSString *)context;
#pragma mark - Recording / Capture
- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio
captureSessionID:(NSString *)captureSessionID
completionHandler:
(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
context:(NSString *)context;
/**
* Unlike captureStillImageAsynchronouslyWithAspectRatio, this captures a single frame from the ongoing video
* stream. This should be faster but lower quality (and smaller size), and does not play the shutter sound.
*/
- (void)captureSingleVideoFrameAsynchronouslyWithCompletionHandler:
(sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler
context:(NSString *)context;
- (void)prepareForRecordingAsynchronouslyWithContext:(NSString *)context
audioConfiguration:(SCAudioConfiguration *)configuration;
- (void)startRecordingAsynchronouslyWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
audioConfiguration:(SCAudioConfiguration *)configuration
maxDuration:(NSTimeInterval)maxDuration
fileURL:(NSURL *)fileURL
captureSessionID:(NSString *)captureSessionID
completionHandler:
(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
context:(NSString *)context;
- (void)stopRecordingAsynchronouslyWithContext:(NSString *)context;
- (void)cancelRecordingAsynchronouslyWithContext:(NSString *)context;
- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context;
- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context;
- (void)sampleFrameWithCompletionHandler:(void (^)(UIImage *frame, CMTime presentationTime))completionHandler
context:(NSString *)context;
// addTimedTask: schedules a task to run; it is a thread-safe API. The task runs on the main thread, so it is not
// recommended to add a large number of tasks that all have the same target time.
- (void)addTimedTask:(SCTimedTask *)task context:(NSString *)context;
// clearTimedTasksWithContext: cancels the scheduled tasks; it is a thread-safe API.
- (void)clearTimedTasksWithContext:(NSString *)context;
#pragma mark - Utilities
- (void)convertViewCoordinates:(CGPoint)viewCoordinates
completionHandler:(sc_managed_capturer_convert_view_coordniates_completion_handler_t)completionHandler
context:(NSString *)context;
- (void)detectLensCategoryOnNextFrame:(CGPoint)point
lenses:(NSArray<SCLens *> *)lenses
completion:(sc_managed_lenses_processor_category_point_completion_handler_t)completion
context:(NSString *)context;
#pragma mark - Configurations
- (void)setDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)setFlashActive:(BOOL)flashActive
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)setLensesActive:(BOOL)lensesActive
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)setLensesActive:(BOOL)lensesActive
filterFactory:(SCLookseryFilterFactory *)filterFactory
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)setLensesInTalkActive:(BOOL)lensesActive
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)setTorchActiveAsynchronously:(BOOL)torchActive
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)setNightModeActiveAsynchronously:(BOOL)active
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)lockZoomWithContext:(NSString *)context;
- (void)unlockZoomWithContext:(NSString *)context;
- (void)setZoomFactorAsynchronously:(CGFloat)zoomFactor context:(NSString *)context;
- (void)resetZoomFactorAsynchronously:(CGFloat)zoomFactor
devicePosition:(SCManagedCaptureDevicePosition)devicePosition
context:(NSString *)context;
- (void)setExposurePointOfInterestAsynchronously:(CGPoint)pointOfInterest
fromUser:(BOOL)fromUser
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)setAutofocusPointOfInterestAsynchronously:(CGPoint)pointOfInterest
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
- (void)continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context;
// These methods need to be called explicitly from SCAppDelegate so that the capturer always has the latest application state.
- (void)applicationDidEnterBackground;
- (void)applicationWillEnterForeground;
- (void)applicationDidBecomeActive;
- (void)applicationWillResignActive;
- (void)mediaServicesWereReset;
- (void)mediaServicesWereLost;
#pragma mark - Add / Remove Listener
- (void)addListener:(id<SCManagedCapturerListener>)listener;
- (void)removeListener:(id<SCManagedCapturerListener>)listener;
- (void)addVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener;
- (void)removeVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener;
- (void)addDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;
- (void)removeDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;
- (NSString *)debugInfo;
- (id<SCManagedVideoDataSource>)currentVideoDataSource;
- (void)checkRestrictedCamera:(void (^)(BOOL, BOOL, AVAuthorizationStatus))callback;
// Needs to be visible so that classes like SCCaptureSessionFixer can manage the capture session.
- (void)recreateAVCaptureSession;
#pragma mark - Snap Creation triggers
- (SCSnapCreationTriggers *)snapCreationTriggers;
@optional
- (BOOL)authorizedForVideoCapture;
- (void)preloadVideoCaptureAuthorization;
@end
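
A usage sketch (not from this commit) of the start/stop token contract documented above; capturer is assumed to be an id<SCCapturer> such as [SCManagedCapturer sharedInstance], and passing nil for the stop completion handler is an assumption.

// Sketch: keep the token returned by start and hand the same token back to stop.
SCCapturerToken *token = [capturer startRunningAsynchronouslyWithCompletionHandler:^{
    // The capture session is now running.
} context:SCCapturerContext];

// ... later, when capture is no longer needed ...
[capturer stopRunningAsynchronously:token completionHandler:nil context:SCCapturerContext];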


@ -0,0 +1,44 @@
//
// SCCapturerBufferedVideoWriter.h
// Snapchat
//
// Created by Chao Pang on 12/5/17.
//
#import <SCFoundation/SCQueuePerformer.h>
#import <SCManagedVideoCapturerOutputSettings.h>
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
@protocol SCCapturerBufferedVideoWriterDelegate <NSObject>
- (void)videoWriterDidFailWritingWithError:(NSError *)error;
@end
@interface SCCapturerBufferedVideoWriter : NSObject
- (instancetype)init NS_UNAVAILABLE;
- (instancetype)initWithPerformer:(id<SCPerforming>)performer
outputURL:(NSURL *)outputURL
delegate:(id<SCCapturerBufferedVideoWriterDelegate>)delegate
error:(NSError **)error;
- (BOOL)prepareWritingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings;
- (void)startWritingAtSourceTime:(CMTime)sourceTime;
- (void)finishWritingAtSourceTime:(CMTime)sourceTime withCompletionHanlder:(dispatch_block_t)completionBlock;
- (void)cancelWriting;
- (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)cleanUp;
@end
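
A lifecycle sketch (not part of this commit), assuming performer, outputURL, outputSettings, the sample-buffer variables, and a self conforming to SCCapturerBufferedVideoWriterDelegate are already in hand; all calls are made on the performer's queue and error handling is abbreviated.

// Sketch: init -> prepare -> start -> append -> finish -> cleanUp, all on the performer's queue.
NSError *error = nil;
SCCapturerBufferedVideoWriter *writer =
    [[SCCapturerBufferedVideoWriter alloc] initWithPerformer:performer
                                                   outputURL:outputURL
                                                    delegate:self
                                                       error:&error];
if (writer && [writer prepareWritingWithOutputSettings:outputSettings]) {
    [writer startWritingAtSourceTime:firstSampleTime];
    [writer appendVideoSampleBuffer:videoSampleBuffer]; // repeated as buffers arrive
    [writer appendAudioSampleBuffer:audioSampleBuffer];
    [writer finishWritingAtSourceTime:lastSampleTime
                withCompletionHanlder:^{ // selector spelling matches the header above
                    [writer cleanUp];
                }];
}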


@ -0,0 +1,430 @@
//
// SCCapturerBufferedVideoWriter.m
// Snapchat
//
// Created by Chao Pang on 12/5/17.
//
#import "SCCapturerBufferedVideoWriter.h"
#import "SCAudioCaptureSession.h"
#import "SCCaptureCommon.h"
#import "SCManagedCapturerUtils.h"
#import <SCBase/SCMacros.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCDeviceName.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCTrace.h>
#import <FBKVOController/FBKVOController.h>
@implementation SCCapturerBufferedVideoWriter {
SCQueuePerformer *_performer;
__weak id<SCCapturerBufferedVideoWriterDelegate> _delegate;
FBKVOController *_observeController;
AVAssetWriter *_assetWriter;
AVAssetWriterInput *_audioWriterInput;
AVAssetWriterInput *_videoWriterInput;
AVAssetWriterInputPixelBufferAdaptor *_pixelBufferAdaptor;
CVPixelBufferPoolRef _defaultPixelBufferPool;
CVPixelBufferPoolRef _nightPixelBufferPool;
CVPixelBufferPoolRef _lensesPixelBufferPool;
CMBufferQueueRef _videoBufferQueue;
CMBufferQueueRef _audioBufferQueue;
}
- (instancetype)initWithPerformer:(id<SCPerforming>)performer
outputURL:(NSURL *)outputURL
delegate:(id<SCCapturerBufferedVideoWriterDelegate>)delegate
error:(NSError **)error
{
self = [super init];
if (self) {
_performer = performer;
_delegate = delegate;
_observeController = [[FBKVOController alloc] initWithObserver:self];
CMBufferQueueCreate(kCFAllocatorDefault, 0, CMBufferQueueGetCallbacksForUnsortedSampleBuffers(),
&_videoBufferQueue);
CMBufferQueueCreate(kCFAllocatorDefault, 0, CMBufferQueueGetCallbacksForUnsortedSampleBuffers(),
&_audioBufferQueue);
_assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeMPEG4 error:error];
        if (!_assetWriter) {
            // AVAssetWriter returns nil on failure and populates *error when one is provided.
            self = nil;
            return self;
        }
}
return self;
}
- (BOOL)prepareWritingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
{
SCTraceStart();
SCAssert([_performer isCurrentPerformer], @"");
SCAssert(outputSettings, @"empty output setting");
// Audio
SCTraceSignal(@"Derive audio output setting");
NSDictionary *audioOutputSettings = @{
AVFormatIDKey : @(kAudioFormatMPEG4AAC),
AVNumberOfChannelsKey : @(1),
AVSampleRateKey : @(kSCAudioCaptureSessionDefaultSampleRate),
AVEncoderBitRateKey : @(outputSettings.audioBitRate)
};
_audioWriterInput =
[[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
_audioWriterInput.expectsMediaDataInRealTime = YES;
// Video
SCTraceSignal(@"Derive video output setting");
size_t outputWidth = outputSettings.width;
size_t outputHeight = outputSettings.height;
SCAssert(outputWidth > 0 && outputHeight > 0 && (outputWidth % 2 == 0) && (outputHeight % 2 == 0),
@"invalid output size");
NSDictionary *videoCompressionSettings = @{
AVVideoAverageBitRateKey : @(outputSettings.videoBitRate),
AVVideoMaxKeyFrameIntervalKey : @(outputSettings.keyFrameInterval)
};
NSDictionary *videoOutputSettings = @{
AVVideoCodecKey : AVVideoCodecH264,
AVVideoWidthKey : @(outputWidth),
AVVideoHeightKey : @(outputHeight),
AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill,
AVVideoCompressionPropertiesKey : videoCompressionSettings
};
_videoWriterInput =
[[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoOutputSettings];
_videoWriterInput.expectsMediaDataInRealTime = YES;
CGAffineTransform transform = CGAffineTransformMakeTranslation(outputHeight, 0);
_videoWriterInput.transform = CGAffineTransformRotate(transform, M_PI_2);
_pixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]
initWithAssetWriterInput:_videoWriterInput
                 sourcePixelBufferAttributes:@{
                     (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
                     (NSString *)kCVPixelBufferWidthKey : @(outputWidth),
                     (NSString *)kCVPixelBufferHeightKey : @(outputHeight)
                 }];
SCTraceSignal(@"Setup video writer input");
if ([_assetWriter canAddInput:_videoWriterInput]) {
[_assetWriter addInput:_videoWriterInput];
} else {
return NO;
}
SCTraceSignal(@"Setup audio writer input");
if ([_assetWriter canAddInput:_audioWriterInput]) {
[_assetWriter addInput:_audioWriterInput];
} else {
return NO;
}
return YES;
}
- (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
SCAssert([_performer isCurrentPerformer], @"");
SC_GUARD_ELSE_RETURN(sampleBuffer);
if (!CMBufferQueueIsEmpty(_videoBufferQueue)) {
// We need to drain the buffer queue in this case
while (_videoWriterInput.readyForMoreMediaData) { // TODO: also need to break out in case of errors
CMSampleBufferRef dequeuedSampleBuffer =
(CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue);
if (dequeuedSampleBuffer == NULL) {
break;
}
[self _appendVideoSampleBuffer:dequeuedSampleBuffer];
CFRelease(dequeuedSampleBuffer);
}
}
// Fast path, just append this sample buffer if ready
if (_videoWriterInput.readyForMoreMediaData) {
[self _appendVideoSampleBuffer:sampleBuffer];
} else {
// It is not ready, queuing the sample buffer
CMBufferQueueEnqueue(_videoBufferQueue, sampleBuffer);
}
}
- (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
SCAssert([_performer isCurrentPerformer], @"");
SC_GUARD_ELSE_RETURN(sampleBuffer);
if (!CMBufferQueueIsEmpty(_audioBufferQueue)) {
// We need to drain the buffer queue in this case
while (_audioWriterInput.readyForMoreMediaData) {
CMSampleBufferRef dequeuedSampleBuffer =
(CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue);
if (dequeuedSampleBuffer == NULL) {
break;
}
            // Append the dequeued buffer (not the incoming one), mirroring the video path above.
            [_audioWriterInput appendSampleBuffer:dequeuedSampleBuffer];
CFRelease(dequeuedSampleBuffer);
}
}
// fast path, just append this sample buffer if ready
if ((_audioWriterInput.readyForMoreMediaData)) {
[_audioWriterInput appendSampleBuffer:sampleBuffer];
} else {
// it is not ready, queuing the sample buffer
CMBufferQueueEnqueue(_audioBufferQueue, sampleBuffer);
}
}
- (void)startWritingAtSourceTime:(CMTime)sourceTime
{
SCTraceStart();
SCAssert([_performer isCurrentPerformer], @"");
    // Observe status changes on the asset writer: when it errors out, it only changes its status and makes
    // no further delegate callbacks.
[_observeController observe:_assetWriter
keyPath:@keypath(_assetWriter, status)
options:NSKeyValueObservingOptionNew
action:@selector(assetWriterStatusChanged:)];
[_assetWriter startWriting];
[_assetWriter startSessionAtSourceTime:sourceTime];
}
- (void)cancelWriting
{
SCTraceStart();
SCAssert([_performer isCurrentPerformer], @"");
CMBufferQueueReset(_videoBufferQueue);
CMBufferQueueReset(_audioBufferQueue);
[_assetWriter cancelWriting];
}
- (void)finishWritingAtSourceTime:(CMTime)sourceTime withCompletionHanlder:(dispatch_block_t)completionBlock
{
SCTraceStart();
SCAssert([_performer isCurrentPerformer], @"");
while (_audioWriterInput.readyForMoreMediaData && !CMBufferQueueIsEmpty(_audioBufferQueue)) {
CMSampleBufferRef audioSampleBuffer = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue);
if (audioSampleBuffer == NULL) {
break;
}
[_audioWriterInput appendSampleBuffer:audioSampleBuffer];
CFRelease(audioSampleBuffer);
}
while (_videoWriterInput.readyForMoreMediaData && !CMBufferQueueIsEmpty(_videoBufferQueue)) {
CMSampleBufferRef videoSampleBuffer = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue);
if (videoSampleBuffer == NULL) {
break;
}
[_videoWriterInput appendSampleBuffer:videoSampleBuffer];
CFRelease(videoSampleBuffer);
}
dispatch_block_t finishWritingBlock = ^() {
[_assetWriter endSessionAtSourceTime:sourceTime];
[_audioWriterInput markAsFinished];
[_videoWriterInput markAsFinished];
[_assetWriter finishWritingWithCompletionHandler:^{
if (completionBlock) {
completionBlock();
}
}];
};
if (CMBufferQueueIsEmpty(_audioBufferQueue) && CMBufferQueueIsEmpty(_videoBufferQueue)) {
finishWritingBlock();
} else {
// We need to drain the samples from the queues before finish writing
__block BOOL isAudioDone = NO;
__block BOOL isVideoDone = NO;
// Audio
[_audioWriterInput
requestMediaDataWhenReadyOnQueue:_performer.queue
usingBlock:^{
if (!CMBufferQueueIsEmpty(_audioBufferQueue) &&
_assetWriter.status == AVAssetWriterStatusWriting) {
CMSampleBufferRef audioSampleBuffer =
(CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue);
if (audioSampleBuffer) {
[_audioWriterInput appendSampleBuffer:audioSampleBuffer];
CFRelease(audioSampleBuffer);
}
} else if (!isAudioDone) {
isAudioDone = YES;
}
if (isAudioDone && isVideoDone) {
finishWritingBlock();
}
}];
// Video
[_videoWriterInput
requestMediaDataWhenReadyOnQueue:_performer.queue
usingBlock:^{
if (!CMBufferQueueIsEmpty(_videoBufferQueue) &&
_assetWriter.status == AVAssetWriterStatusWriting) {
CMSampleBufferRef videoSampleBuffer =
(CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue);
if (videoSampleBuffer) {
[_videoWriterInput appendSampleBuffer:videoSampleBuffer];
CFRelease(videoSampleBuffer);
}
} else if (!isVideoDone) {
isVideoDone = YES;
}
if (isAudioDone && isVideoDone) {
finishWritingBlock();
}
}];
}
}
- (void)cleanUp
{
_assetWriter = nil;
_videoWriterInput = nil;
_audioWriterInput = nil;
_pixelBufferAdaptor = nil;
}
- (void)dealloc
{
CFRelease(_videoBufferQueue);
CFRelease(_audioBufferQueue);
CVPixelBufferPoolRelease(_defaultPixelBufferPool);
CVPixelBufferPoolRelease(_nightPixelBufferPool);
CVPixelBufferPoolRelease(_lensesPixelBufferPool);
[_observeController unobserveAll];
}
- (void)assetWriterStatusChanged:(NSDictionary *)change
{
SCTraceStart();
if (_assetWriter.status == AVAssetWriterStatusFailed) {
SCTraceSignal(@"Asset writer status failed %@, error %@", change, _assetWriter.error);
[_delegate videoWriterDidFailWritingWithError:[_assetWriter.error copy]];
}
}
#pragma mark - Private methods
- (CVImageBufferRef)_croppedPixelBufferWithInputPixelBuffer:(CVImageBufferRef)inputPixelBuffer
{
SCAssertTrue([SCDeviceName isIphoneX]);
const size_t inputBufferWidth = CVPixelBufferGetWidth(inputPixelBuffer);
const size_t inputBufferHeight = CVPixelBufferGetHeight(inputPixelBuffer);
const size_t croppedBufferWidth = (size_t)(inputBufferWidth * kSCIPhoneXCapturedImageVideoCropRatio) / 2 * 2;
const size_t croppedBufferHeight =
(size_t)(croppedBufferWidth * SCManagedCapturedImageAndVideoAspectRatio()) / 2 * 2;
const size_t offsetPointX = inputBufferWidth - croppedBufferWidth;
const size_t offsetPointY = (inputBufferHeight - croppedBufferHeight) / 4 * 2;
SC_GUARD_ELSE_RUN_AND_RETURN_VALUE((inputBufferWidth >= croppedBufferWidth) &&
(inputBufferHeight >= croppedBufferHeight) && (offsetPointX % 2 == 0) &&
(offsetPointY % 2 == 0) &&
(inputBufferWidth >= croppedBufferWidth + offsetPointX) &&
(inputBufferHeight >= croppedBufferHeight + offsetPointY),
SCLogGeneralError(@"Invalid cropping configuration"), NULL);
CVPixelBufferRef croppedPixelBuffer = NULL;
CVPixelBufferPoolRef pixelBufferPool =
[self _pixelBufferPoolWithInputSize:CGSizeMake(inputBufferWidth, inputBufferHeight)
croppedSize:CGSizeMake(croppedBufferWidth, croppedBufferHeight)];
if (pixelBufferPool) {
CVReturn result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &croppedPixelBuffer);
if ((result != kCVReturnSuccess) || (croppedPixelBuffer == NULL)) {
SCLogGeneralError(@"[SCCapturerVideoWriterInput] Error creating croppedPixelBuffer");
return NULL;
}
} else {
SCAssertFail(@"[SCCapturerVideoWriterInput] PixelBufferPool is NULL with inputBufferWidth:%@, "
@"inputBufferHeight:%@, croppedBufferWidth:%@, croppedBufferHeight:%@",
@(inputBufferWidth), @(inputBufferHeight), @(croppedBufferWidth), @(croppedBufferHeight));
return NULL;
}
CVPixelBufferLockBaseAddress(inputPixelBuffer, kCVPixelBufferLock_ReadOnly);
CVPixelBufferLockBaseAddress(croppedPixelBuffer, 0);
const size_t planesCount = CVPixelBufferGetPlaneCount(inputPixelBuffer);
for (int planeIndex = 0; planeIndex < planesCount; planeIndex++) {
size_t inPlaneHeight = CVPixelBufferGetHeightOfPlane(inputPixelBuffer, planeIndex);
size_t inPlaneBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(inputPixelBuffer, planeIndex);
uint8_t *inPlaneAdress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(inputPixelBuffer, planeIndex);
size_t croppedPlaneHeight = CVPixelBufferGetHeightOfPlane(croppedPixelBuffer, planeIndex);
size_t croppedPlaneBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(croppedPixelBuffer, planeIndex);
uint8_t *croppedPlaneAdress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(croppedPixelBuffer, planeIndex);
// Note that inPlaneBytesPerRow is not strictly 2x of inPlaneWidth for some devices (e.g. iPhone X).
// However, since UV are packed together in memory, we can use offsetPointX for all planes
size_t offsetPlaneBytesX = offsetPointX;
size_t offsetPlaneBytesY = offsetPointY * inPlaneHeight / inputBufferHeight;
inPlaneAdress = inPlaneAdress + offsetPlaneBytesY * inPlaneBytesPerRow + offsetPlaneBytesX;
size_t bytesToCopyPerRow = MIN(inPlaneBytesPerRow - offsetPlaneBytesX, croppedPlaneBytesPerRow);
for (int i = 0; i < croppedPlaneHeight; i++) {
memcpy(croppedPlaneAdress, inPlaneAdress, bytesToCopyPerRow);
inPlaneAdress += inPlaneBytesPerRow;
croppedPlaneAdress += croppedPlaneBytesPerRow;
}
}
CVPixelBufferUnlockBaseAddress(inputPixelBuffer, kCVPixelBufferLock_ReadOnly);
CVPixelBufferUnlockBaseAddress(croppedPixelBuffer, 0);
return croppedPixelBuffer;
}
- (CVPixelBufferPoolRef)_pixelBufferPoolWithInputSize:(CGSize)inputSize croppedSize:(CGSize)croppedSize
{
if (CGSizeEqualToSize(inputSize, [SCManagedCaptureDevice defaultActiveFormatResolution])) {
if (_defaultPixelBufferPool == NULL) {
_defaultPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height];
}
return _defaultPixelBufferPool;
} else if (CGSizeEqualToSize(inputSize, [SCManagedCaptureDevice nightModeActiveFormatResolution])) {
if (_nightPixelBufferPool == NULL) {
_nightPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height];
}
return _nightPixelBufferPool;
} else {
if (_lensesPixelBufferPool == NULL) {
_lensesPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height];
}
return _lensesPixelBufferPool;
}
}
- (CVPixelBufferPoolRef)_newPixelBufferPoolWithWidth:(size_t)width height:(size_t)height
{
    NSDictionary *attributes = @{
        (NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{},
        (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
        (NSString *)kCVPixelBufferWidthKey : @(width),
        (NSString *)kCVPixelBufferHeightKey : @(height)
    };
CVPixelBufferPoolRef pixelBufferPool = NULL;
CVReturn result = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL,
(__bridge CFDictionaryRef _Nullable)(attributes), &pixelBufferPool);
if (result != kCVReturnSuccess) {
SCLogGeneralError(@"[SCCapturerBufferredVideoWriter] Error creating pixel buffer pool %i", result);
return NULL;
}
return pixelBufferPool;
}
- (void)_appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
SCAssert([_performer isCurrentPerformer], @"");
CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
CVImageBufferRef inputPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if ([SCDeviceName isIphoneX]) {
CVImageBufferRef croppedPixelBuffer = [self _croppedPixelBufferWithInputPixelBuffer:inputPixelBuffer];
if (croppedPixelBuffer) {
[_pixelBufferAdaptor appendPixelBuffer:croppedPixelBuffer withPresentationTime:presentationTime];
CVPixelBufferRelease(croppedPixelBuffer);
}
} else {
[_pixelBufferAdaptor appendPixelBuffer:inputPixelBuffer withPresentationTime:presentationTime];
}
}
@end


@ -0,0 +1,20 @@
//
// SCCapturerDefines.h
// Snapchat
//
// Created by Chao Pang on 12/20/17.
//
#import <Foundation/Foundation.h>
typedef NS_ENUM(NSInteger, SCCapturerLightingConditionType) {
SCCapturerLightingConditionTypeNormal = 0,
SCCapturerLightingConditionTypeDark,
SCCapturerLightingConditionTypeExtremeDark,
};
typedef struct SampleBufferMetadata {
int isoSpeedRating;
float exposureTime;
float brightness;
} SampleBufferMetadata;


@ -0,0 +1,18 @@
//
// SCCapturerToken.h
// Snapchat
//
// Created by Xishuo Liu on 3/24/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface SCCapturerToken : NSObject
- (instancetype)initWithIdentifier:(NSString *)identifier NS_DESIGNATED_INITIALIZER;
- (instancetype)init __attribute__((unavailable("Use initWithIdentifier: instead.")));
- (instancetype) new __attribute__((unavailable("Use initWithIdentifier: instead.")));
@end


@ -0,0 +1,30 @@
//
// SCCapturerToken.m
// Snapchat
//
// Created by Xishuo Liu on 3/24/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCCapturerToken.h"
#import <SCFoundation/NSString+SCFormat.h>
@implementation SCCapturerToken {
NSString *_identifier;
}
- (instancetype)initWithIdentifier:(NSString *)identifier
{
if (self = [super init]) {
_identifier = identifier.copy;
}
return self;
}
- (NSString *)debugDescription
{
return [NSString sc_stringWithFormat:@"%@_%@", _identifier, self];
}
@end


@ -0,0 +1,20 @@
//
// Created by Aaron Levine on 10/16/17.
//
#import <SCBase/SCMacros.h>
#import <Foundation/Foundation.h>
@class SCCapturerToken;
NS_ASSUME_NONNULL_BEGIN
@interface SCCapturerTokenProvider : NSObject
SC_INIT_AND_NEW_UNAVAILABLE
+ (instancetype)providerWithToken:(SCCapturerToken *)token;
- (nullable SCCapturerToken *)getTokenAndInvalidate;
@end
NS_ASSUME_NONNULL_END
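
A small sketch (not from this commit) of the one-shot hand-off the provider implements; token is an existing SCCapturerToken and both calls must happen on the main thread.

// Sketch: the first call returns the token, every later call returns nil.
SCCapturerTokenProvider *provider = [SCCapturerTokenProvider providerWithToken:token];
SCCapturerToken *handedOff = [provider getTokenAndInvalidate]; // the original token
SCCapturerToken *secondTry = [provider getTokenAndInvalidate]; // nil from now on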


@ -0,0 +1,42 @@
//
// Created by Aaron Levine on 10/16/17.
//
#import "SCCapturerTokenProvider.h"
#import "SCCapturerToken.h"
#import <SCBase/SCAssignment.h>
#import <SCFoundation/SCAssertWrapper.h>
@implementation SCCapturerTokenProvider {
SCCapturerToken *_Nullable _token;
}
+ (instancetype)providerWithToken:(SCCapturerToken *)token
{
return [[self alloc] initWithToken:token];
}
- (instancetype)initWithToken:(SCCapturerToken *)token
{
self = [super init];
if (self) {
_token = token;
}
return self;
}
- (nullable SCCapturerToken *)getTokenAndInvalidate
{
    // Ensure serial access by requiring calls to be on the main thread.
SCAssertMainThread();
let token = _token;
_token = nil;
return token;
}
@end


@ -0,0 +1,18 @@
//
// SCExposureState.h
// Snapchat
//
// Created by Derek Peirce on 4/10/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
@interface SCExposureState : NSObject
- (instancetype)initWithDevice:(AVCaptureDevice *)device;
- (void)applyISOAndExposureDurationToDevice:(AVCaptureDevice *)device;
@end
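
A usage sketch (not part of this commit): snapshot the exposure of the device being switched away from and re-apply it to the new one; oldDevice and newDevice are assumed AVCaptureDevice instances.

// Sketch: preserve ISO and exposure duration across a camera switch.
SCExposureState *exposureState = [[SCExposureState alloc] initWithDevice:oldDevice];
// ... reconfigure the capture session with newDevice ...
[exposureState applyISOAndExposureDurationToDevice:newDevice];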


@ -0,0 +1,47 @@
//
// SCExposureState.m
// Snapchat
//
// Created by Derek Peirce on 4/10/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCExposureState.h"
#import "AVCaptureDevice+ConfigurationLock.h"
#import <SCBase/SCMacros.h>
@import AVFoundation;
@implementation SCExposureState {
float _ISO;
CMTime _exposureDuration;
}
- (instancetype)initWithDevice:(AVCaptureDevice *)device
{
if (self = [super init]) {
_ISO = device.ISO;
_exposureDuration = device.exposureDuration;
}
return self;
}
- (void)applyISOAndExposureDurationToDevice:(AVCaptureDevice *)device
{
if ([device isExposureModeSupported:AVCaptureExposureModeCustom]) {
[device runTask:@"set prior exposure"
withLockedConfiguration:^() {
CMTime exposureDuration =
CMTimeClampToRange(_exposureDuration, CMTimeRangeMake(device.activeFormat.minExposureDuration,
device.activeFormat.maxExposureDuration));
[device setExposureModeCustomWithDuration:exposureDuration
ISO:SC_CLAMP(_ISO, device.activeFormat.minISO,
device.activeFormat.maxISO)
completionHandler:nil];
}];
}
}
@end


@ -0,0 +1,19 @@
//
// SCFileAudioCaptureSession.h
// Snapchat
//
// Created by Xiaomu Wu on 2/2/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCAudioCaptureSession.h"
#import <Foundation/Foundation.h>
@interface SCFileAudioCaptureSession : NSObject <SCAudioCaptureSession>
// Linear PCM is required.
// To best mimic `SCAudioCaptureSession`, use an audio file recorded from it.
- (void)setFileURL:(NSURL *)fileURL;
@end
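
A playback sketch (not from this commit), assuming self conforms to the SCAudioCaptureSession delegate protocol, that the protocol exposes the delegate property synthesized by the implementation, and that the file path is illustrative; the sample rate argument is ignored by this implementation.

// Sketch: feed the capture pipeline from a pre-recorded linear-PCM file instead of the microphone.
SCFileAudioCaptureSession *fileSession = [[SCFileAudioCaptureSession alloc] init];
fileSession.delegate = self; // receives audioCaptureSession:didOutputSampleBuffer:
[fileSession setFileURL:[NSURL fileURLWithPath:@"/tmp/recorded-audio.caf"]]; // illustrative path
[fileSession beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate
                                            completionHandler:NULL];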


@ -0,0 +1,243 @@
//
// SCFileAudioCaptureSession.m
// Snapchat
//
// Created by Xiaomu Wu on 2/2/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCFileAudioCaptureSession.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCSentinel.h>
@import AudioToolbox;
static float const kAudioBufferDurationInSeconds = 0.2; // same as SCAudioCaptureSession
static char *const kSCFileAudioCaptureSessionQueueLabel = "com.snapchat.file-audio-capture-session";
@implementation SCFileAudioCaptureSession {
SCQueuePerformer *_performer;
SCSentinel *_sentinel;
NSURL *_fileURL;
AudioFileID _audioFile; // audio file
AudioStreamBasicDescription _asbd; // audio format (core audio)
CMAudioFormatDescriptionRef _formatDescription; // audio format (core media)
SInt64 _readCurPacket; // current packet index to read
UInt32 _readNumPackets; // number of packets to read every time
UInt32 _readNumBytes; // number of bytes to read every time
void *_readBuffer; // data buffer to hold read packets
}
@synthesize delegate = _delegate;
#pragma mark - Public
- (instancetype)init
{
self = [super init];
if (self) {
_performer = [[SCQueuePerformer alloc] initWithLabel:kSCFileAudioCaptureSessionQueueLabel
qualityOfService:QOS_CLASS_UNSPECIFIED
queueType:DISPATCH_QUEUE_SERIAL
context:SCQueuePerformerContextCamera];
_sentinel = [[SCSentinel alloc] init];
}
return self;
}
- (void)dealloc
{
if (_audioFile) {
AudioFileClose(_audioFile);
}
if (_formatDescription) {
CFRelease(_formatDescription);
}
if (_readBuffer) {
free(_readBuffer);
}
}
- (void)setFileURL:(NSURL *)fileURL
{
[_performer perform:^{
_fileURL = fileURL;
}];
}
#pragma mark - SCAudioCaptureSession
- (void)beginAudioRecordingAsynchronouslyWithSampleRate:(double)sampleRate // `sampleRate` ignored
completionHandler:(audio_capture_session_block)completionHandler
{
[_performer perform:^{
BOOL succeeded = [self _setup];
int32_t sentinelValue = [_sentinel value];
if (completionHandler) {
completionHandler(nil);
}
if (succeeded) {
[_performer perform:^{
SC_GUARD_ELSE_RETURN([_sentinel value] == sentinelValue);
[self _read];
}
after:kAudioBufferDurationInSeconds];
}
}];
}
- (void)disposeAudioRecordingSynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
{
[_performer performAndWait:^{
[self _teardown];
if (completionHandler) {
completionHandler();
}
}];
}
#pragma mark - Private
- (BOOL)_setup
{
SCAssert([_performer isCurrentPerformer], @"");
[_sentinel increment];
OSStatus status = noErr;
status = AudioFileOpenURL((__bridge CFURLRef)_fileURL, kAudioFileReadPermission, 0, &_audioFile);
if (noErr != status) {
SCLogGeneralError(@"Cannot open file at URL %@, error code %d", _fileURL, (int)status);
return NO;
}
_asbd = (AudioStreamBasicDescription){0};
UInt32 asbdSize = sizeof(_asbd);
status = AudioFileGetProperty(_audioFile, kAudioFilePropertyDataFormat, &asbdSize, &_asbd);
if (noErr != status) {
SCLogGeneralError(@"Cannot get audio data format, error code %d", (int)status);
AudioFileClose(_audioFile);
_audioFile = NULL;
return NO;
}
if (kAudioFormatLinearPCM != _asbd.mFormatID) {
SCLogGeneralError(@"Linear PCM is required");
AudioFileClose(_audioFile);
_audioFile = NULL;
_asbd = (AudioStreamBasicDescription){0};
return NO;
}
UInt32 aclSize = 0;
AudioChannelLayout *acl = NULL;
status = AudioFileGetPropertyInfo(_audioFile, kAudioFilePropertyChannelLayout, &aclSize, NULL);
if (noErr == status) {
acl = malloc(aclSize);
status = AudioFileGetProperty(_audioFile, kAudioFilePropertyChannelLayout, &aclSize, acl);
if (noErr != status) {
aclSize = 0;
free(acl);
acl = NULL;
}
}
status = CMAudioFormatDescriptionCreate(NULL, &_asbd, aclSize, acl, 0, NULL, NULL, &_formatDescription);
if (acl) {
free(acl);
acl = NULL;
}
if (noErr != status) {
SCLogGeneralError(@"Cannot create format description, error code %d", (int)status);
AudioFileClose(_audioFile);
_audioFile = NULL;
_asbd = (AudioStreamBasicDescription){0};
return NO;
}
_readCurPacket = 0;
_readNumPackets = ceil(_asbd.mSampleRate * kAudioBufferDurationInSeconds);
_readNumBytes = _asbd.mBytesPerPacket * _readNumPackets;
_readBuffer = malloc(_readNumBytes);
return YES;
}
- (void)_read
{
SCAssert([_performer isCurrentPerformer], @"");
OSStatus status = noErr;
UInt32 numBytes = _readNumBytes;
UInt32 numPackets = _readNumPackets;
status = AudioFileReadPacketData(_audioFile, NO, &numBytes, NULL, _readCurPacket, &numPackets, _readBuffer);
if (noErr != status) {
SCLogGeneralError(@"Cannot read audio data, error code %d", (int)status);
return;
}
if (0 == numPackets) {
return;
}
CMTime PTS = CMTimeMakeWithSeconds(_readCurPacket / _asbd.mSampleRate, 600);
_readCurPacket += numPackets;
CMBlockBufferRef dataBuffer = NULL;
status = CMBlockBufferCreateWithMemoryBlock(NULL, NULL, numBytes, NULL, NULL, 0, numBytes, 0, &dataBuffer);
if (kCMBlockBufferNoErr == status) {
if (dataBuffer) {
CMBlockBufferReplaceDataBytes(_readBuffer, dataBuffer, 0, numBytes);
CMSampleBufferRef sampleBuffer = NULL;
CMAudioSampleBufferCreateWithPacketDescriptions(NULL, dataBuffer, true, NULL, NULL, _formatDescription,
numPackets, PTS, NULL, &sampleBuffer);
if (sampleBuffer) {
[_delegate audioCaptureSession:self didOutputSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}
CFRelease(dataBuffer);
}
} else {
SCLogGeneralError(@"Cannot create data buffer, error code %d", (int)status);
}
int32_t sentinelValue = [_sentinel value];
[_performer perform:^{
SC_GUARD_ELSE_RETURN([_sentinel value] == sentinelValue);
[self _read];
}
after:kAudioBufferDurationInSeconds];
}
- (void)_teardown
{
SCAssert([_performer isCurrentPerformer], @"");
[_sentinel increment];
if (_audioFile) {
AudioFileClose(_audioFile);
_audioFile = NULL;
}
_asbd = (AudioStreamBasicDescription){0};
if (_formatDescription) {
CFRelease(_formatDescription);
_formatDescription = NULL;
}
_readCurPacket = 0;
_readNumPackets = 0;
_readNumBytes = 0;
if (_readBuffer) {
free(_readBuffer);
_readBuffer = NULL;
}
}
@end

View File

@ -0,0 +1,20 @@
//
// SCManagedAudioStreamer.h
// Snapchat
//
// Created by Ricardo Sánchez-Sáez on 7/28/16.
// Copyright © 2016 Snapchat, Inc. All rights reserved.
//
#import <SCCameraFoundation/SCManagedAudioDataSource.h>
#import <Foundation/Foundation.h>
@interface SCManagedAudioStreamer : NSObject <SCManagedAudioDataSource>
+ (instancetype)sharedInstance;
+ (instancetype) new NS_UNAVAILABLE;
- (instancetype)init NS_UNAVAILABLE;
@end

View File

@ -0,0 +1,115 @@
//
// SCManagedAudioStreamer.m
// Snapchat
//
// Created by Ricardo Sánchez-Sáez on 7/28/16.
// Copyright © 2016 Snapchat, Inc. All rights reserved.
//
#import "SCManagedAudioStreamer.h"
#import "SCAudioCaptureSession.h"
#import <SCAudio/SCAudioSession.h>
#import <SCCameraFoundation/SCManagedAudioDataSourceListenerAnnouncer.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCAudioScope/SCAudioScope.h>
#import <SCAudioScope/SCAudioSessionExperimentAdapter.h>
static char *const kSCManagedAudioStreamerQueueLabel = "com.snapchat.audioStreamerQueue";
@interface SCManagedAudioStreamer () <SCAudioCaptureSessionDelegate>
@end
@implementation SCManagedAudioStreamer {
SCAudioCaptureSession *_captureSession;
SCAudioConfigurationToken *_audioConfiguration;
SCManagedAudioDataSourceListenerAnnouncer *_announcer;
SCScopedAccess<SCMutableAudioSession *> *_scopedMutableAudioSession;
}
@synthesize performer = _performer;
+ (instancetype)sharedInstance
{
static dispatch_once_t onceToken;
static SCManagedAudioStreamer *managedAudioStreamer;
dispatch_once(&onceToken, ^{
managedAudioStreamer = [[SCManagedAudioStreamer alloc] initSharedInstance];
});
return managedAudioStreamer;
}
- (instancetype)initSharedInstance
{
SCTraceStart();
self = [super init];
if (self) {
_performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedAudioStreamerQueueLabel
qualityOfService:QOS_CLASS_USER_INTERACTIVE
queueType:DISPATCH_QUEUE_SERIAL
context:SCQueuePerformerContextCamera];
_announcer = [[SCManagedAudioDataSourceListenerAnnouncer alloc] init];
_captureSession = [[SCAudioCaptureSession alloc] init];
_captureSession.delegate = self;
}
return self;
}
- (BOOL)isStreaming
{
return _audioConfiguration != nil;
}
- (void)startStreamingWithAudioConfiguration:(SCAudioConfiguration *)configuration
{
SCTraceStart();
[_performer perform:^{
if (!self.isStreaming) {
// Begin audio recording asynchronously. First we need to have the proper audio session category.
_audioConfiguration = [SCAudioSessionExperimentAdapter
configureWith:configuration
performer:_performer
completion:^(NSError *error) {
[_captureSession
beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate
completionHandler:NULL];
}];
}
}];
}
- (void)stopStreaming
{
[_performer perform:^{
if (self.isStreaming) {
[_captureSession disposeAudioRecordingSynchronouslyWithCompletionHandler:NULL];
[SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil];
_audioConfiguration = nil;
}
}];
}
- (void)addListener:(id<SCManagedAudioDataSourceListener>)listener
{
SCTraceStart();
[_announcer addListener:listener];
}
- (void)removeListener:(id<SCManagedAudioDataSourceListener>)listener
{
SCTraceStart();
[_announcer removeListener:listener];
}
- (void)audioCaptureSession:(SCAudioCaptureSession *)audioCaptureSession
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
[_announcer managedAudioDataSource:self didOutputSampleBuffer:sampleBuffer];
}
@end

View File

@ -0,0 +1,71 @@
//
// SCManagedCaptureDevice+SCManagedCapturer.h
// Snapchat
//
// Created by Liu Liu on 5/9/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDevice.h"
#import <AVFoundation/AVFoundation.h>
@interface SCManagedCaptureDevice (SCManagedCapturer)
@property (nonatomic, strong, readonly) AVCaptureDevice *device;
@property (nonatomic, strong, readonly) AVCaptureDeviceInput *deviceInput;
@property (nonatomic, copy, readonly) NSError *error;
@property (nonatomic, assign, readonly) BOOL isConnected;
@property (nonatomic, strong, readonly) AVCaptureDeviceFormat *activeFormat;
// Setup and hook up with device
- (BOOL)setDeviceAsInput:(AVCaptureSession *)session;
- (void)removeDeviceAsInput:(AVCaptureSession *)session;
- (void)resetDeviceAsInput;
// Configurations
@property (nonatomic, assign) BOOL flashActive;
@property (nonatomic, assign) BOOL torchActive;
@property (nonatomic, assign) float zoomFactor;
@property (nonatomic, assign, readonly) BOOL liveVideoStreamingActive;
@property (nonatomic, assign, readonly) BOOL isNightModeActive;
@property (nonatomic, assign, readonly) BOOL isFlashSupported;
@property (nonatomic, assign, readonly) BOOL isTorchSupported;
- (void)setNightModeActive:(BOOL)nightModeActive session:(AVCaptureSession *)session;
- (void)setLiveVideoStreaming:(BOOL)liveVideoStreaming session:(AVCaptureSession *)session;
- (void)setCaptureDepthData:(BOOL)captureDepthData session:(AVCaptureSession *)session;
- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser;
- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest;
- (void)continuousAutofocus;
- (void)setRecording:(BOOL)recording;
- (void)updateActiveFormatWithSession:(AVCaptureSession *)session;
// Utilities
- (CGPoint)convertViewCoordinates:(CGPoint)viewCoordinates
viewSize:(CGSize)viewSize
videoGravity:(NSString *)videoGravity;
@end

View File

@ -0,0 +1,17 @@
//
// SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h
// Snapchat
//
// Created by Kam Sheffield on 10/29/15.
// Copyright © 2015 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDevice.h"
#import <AVFoundation/AVFoundation.h>
@interface SCManagedCaptureDevice (SCManagedDeviceCapacityAnalyzer)
@property (nonatomic, strong, readonly) AVCaptureDevice *device;
@end

View File

@ -0,0 +1,60 @@
//
// SCManagedCaptureDevice.h
// Snapchat
//
// Created by Liu Liu on 4/22/15.
// Copyright (c) 2015 Liu Liu. All rights reserved.
//
#import <SCCameraFoundation/SCManagedCaptureDevicePosition.h>
#import <SCCameraFoundation/SCManagedCaptureDeviceProtocol.h>
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
extern CGFloat const kSCMaxVideoZoomFactor;
extern CGFloat const kSCMinVideoZoomFactor;
@class SCManagedCaptureDevice;
@protocol SCManagedCaptureDeviceDelegate <NSObject>
@optional
- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeAdjustingExposure:(BOOL)adjustingExposure;
- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeExposurePoint:(CGPoint)exposurePoint;
- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeFocusPoint:(CGPoint)focusPoint;
@end
@interface SCManagedCaptureDevice : NSObject <SCManagedCaptureDeviceProtocol>
@property (nonatomic, weak) id<SCManagedCaptureDeviceDelegate> delegate;
// These two class methods are thread safe
+ (instancetype)front;
+ (instancetype)back;
+ (instancetype)dualCamera;
+ (instancetype)deviceWithPosition:(SCManagedCaptureDevicePosition)position;
+ (BOOL)is1080pSupported;
+ (BOOL)isMixCaptureSupported;
+ (BOOL)isNightModeSupported;
+ (BOOL)isEnhancedNightModeSupported;
+ (CGSize)defaultActiveFormatResolution;
+ (CGSize)nightModeActiveFormatResolution;
- (BOOL)softwareZoom;
- (SCManagedCaptureDevicePosition)position;
- (BOOL)isAvailable;
@end

View File

@ -0,0 +1,821 @@
//
// SCManagedCaptureDevice.m
// Snapchat
//
// Created by Liu Liu on 4/22/15.
// Copyright (c) 2015 Liu Liu. All rights reserved.
//
#import "SCManagedCaptureDevice.h"
#import "AVCaptureDevice+ConfigurationLock.h"
#import "SCCameraTweaks.h"
#import "SCCaptureCommon.h"
#import "SCCaptureDeviceResolver.h"
#import "SCManagedCaptureDevice+SCManagedCapturer.h"
#import "SCManagedCaptureDeviceAutoExposureHandler.h"
#import "SCManagedCaptureDeviceAutoFocusHandler.h"
#import "SCManagedCaptureDeviceExposureHandler.h"
#import "SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h"
#import "SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h"
#import "SCManagedCaptureDeviceFocusHandler.h"
#import "SCManagedCapturer.h"
#import "SCManagedDeviceCapacityAnalyzer.h"
#import <SCFoundation/SCDeviceName.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCTrace.h>
#import <FBKVOController/FBKVOController.h>
static int32_t const kSCManagedCaptureDeviceMaximumHighFrameRate = 30;
static int32_t const kSCManagedCaptureDeviceMaximumLowFrameRate = 24;
static float const kSCManagedCaptureDevicecSoftwareMaxZoomFactor = 8;
CGFloat const kSCMaxVideoZoomFactor = 100; // the max videoZoomFactor acceptable
CGFloat const kSCMinVideoZoomFactor = 1;
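// For each desired height, pick the full-range 4:2:0 bi-planar format with the largest high-resolution
// still image (HRSI) dimensions, optionally restricted to formats that also support depth data.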
static NSDictionary *SCBestHRSIFormatsForHeights(NSArray *desiredHeights, NSArray *formats, BOOL shouldSupportDepth)
{
NSMutableDictionary *bestHRSIHeights = [NSMutableDictionary dictionary];
for (NSNumber *height in desiredHeights) {
bestHRSIHeights[height] = @0;
}
NSMutableDictionary *bestHRSIFormats = [NSMutableDictionary dictionary];
for (AVCaptureDeviceFormat *format in formats) {
        if (@available(iOS 11.0, *)) {
if (shouldSupportDepth && format.supportedDepthDataFormats.count == 0) {
continue;
}
}
if (CMFormatDescriptionGetMediaSubType(format.formatDescription) !=
kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
continue;
}
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
NSNumber *height = @(dimensions.height);
NSNumber *bestHRSI = bestHRSIHeights[height];
if (bestHRSI) {
CMVideoDimensions hrsi = format.highResolutionStillImageDimensions;
            // When HRSI is enabled, we are only interested in the format with the largest still-image height.
if (hrsi.height > [bestHRSI intValue]) {
bestHRSIHeights[height] = @(hrsi.height);
bestHRSIFormats[height] = format;
}
}
}
return [bestHRSIFormats copy];
}
static inline float SCDegreesToRadians(float theta)
{
return theta * (float)M_PI / 180.f;
}
static inline float SCRadiansToDegrees(float theta)
{
return theta * 180.f / (float)M_PI;
}
@implementation SCManagedCaptureDevice {
AVCaptureDevice *_device;
AVCaptureDeviceInput *_deviceInput;
AVCaptureDeviceFormat *_defaultFormat;
AVCaptureDeviceFormat *_nightFormat;
AVCaptureDeviceFormat *_liveVideoStreamingFormat;
SCManagedCaptureDevicePosition _devicePosition;
// Configurations on the device, shortcut to avoid re-configurations
id<SCManagedCaptureDeviceExposureHandler> _exposureHandler;
id<SCManagedCaptureDeviceFocusHandler> _focusHandler;
FBKVOController *_observeController;
// For the private category methods
NSError *_error;
BOOL _softwareZoom;
BOOL _isConnected;
BOOL _flashActive;
BOOL _torchActive;
BOOL _liveVideoStreamingActive;
float _zoomFactor;
BOOL _isNightModeActive;
BOOL _captureDepthData;
}
@synthesize fieldOfView = _fieldOfView;
+ (instancetype)front
{
SCTraceStart();
static dispatch_once_t onceToken;
static SCManagedCaptureDevice *front;
static dispatch_semaphore_t semaphore;
dispatch_once(&onceToken, ^{
semaphore = dispatch_semaphore_create(1);
});
/* You can use the tweak below to intentionally kill camera in debug.
if (SCIsDebugBuild() && SCCameraTweaksKillFrontCamera()) {
return nil;
}
*/
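    // Serialize the resolver lookup; if it fails, `front` stays nil so a later call can retry instead of
    // caching the failure.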
dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
if (!front) {
AVCaptureDevice *device =
[[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionFront];
if (device) {
front = [[SCManagedCaptureDevice alloc] initWithDevice:device
devicePosition:SCManagedCaptureDevicePositionFront];
}
}
dispatch_semaphore_signal(semaphore);
return front;
}
+ (instancetype)back
{
SCTraceStart();
static dispatch_once_t onceToken;
static SCManagedCaptureDevice *back;
static dispatch_semaphore_t semaphore;
dispatch_once(&onceToken, ^{
semaphore = dispatch_semaphore_create(1);
});
/* You can use the tweak below to intentionally kill camera in debug.
if (SCIsDebugBuild() && SCCameraTweaksKillBackCamera()) {
return nil;
}
*/
dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
if (!back) {
AVCaptureDevice *device =
[[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionBack];
if (device) {
back = [[SCManagedCaptureDevice alloc] initWithDevice:device
devicePosition:SCManagedCaptureDevicePositionBack];
}
}
dispatch_semaphore_signal(semaphore);
return back;
}
+ (SCManagedCaptureDevice *)dualCamera
{
SCTraceStart();
static dispatch_once_t onceToken;
static SCManagedCaptureDevice *dualCamera;
static dispatch_semaphore_t semaphore;
dispatch_once(&onceToken, ^{
semaphore = dispatch_semaphore_create(1);
});
dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
if (!dualCamera) {
AVCaptureDevice *device = [[SCCaptureDeviceResolver sharedInstance] findDualCamera];
if (device) {
dualCamera = [[SCManagedCaptureDevice alloc] initWithDevice:device
devicePosition:SCManagedCaptureDevicePositionBackDualCamera];
}
}
dispatch_semaphore_signal(semaphore);
return dualCamera;
}
+ (instancetype)deviceWithPosition:(SCManagedCaptureDevicePosition)position
{
switch (position) {
case SCManagedCaptureDevicePositionFront:
return [self front];
case SCManagedCaptureDevicePositionBack:
return [self back];
case SCManagedCaptureDevicePositionBackDualCamera:
return [self dualCamera];
}
}
+ (BOOL)is1080pSupported
{
return [SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6SorNewer];
}
+ (BOOL)isMixCaptureSupported
{
return !![self front] && !![self back];
}
+ (BOOL)isNightModeSupported
{
return [SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6orNewer];
}
+ (BOOL)isEnhancedNightModeSupported
{
if (SC_AT_LEAST_IOS_11) {
return [SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6SorNewer];
}
return NO;
}
+ (CGSize)defaultActiveFormatResolution
{
if ([SCDeviceName isIphoneX]) {
return CGSizeMake(kSCManagedCapturerVideoActiveFormatWidth1080p,
kSCManagedCapturerVideoActiveFormatHeight1080p);
}
return CGSizeMake(kSCManagedCapturerDefaultVideoActiveFormatWidth,
kSCManagedCapturerDefaultVideoActiveFormatHeight);
}
+ (CGSize)nightModeActiveFormatResolution
{
if ([SCManagedCaptureDevice isEnhancedNightModeSupported]) {
return CGSizeMake(kSCManagedCapturerNightVideoHighResActiveFormatWidth,
kSCManagedCapturerNightVideoHighResActiveFormatHeight);
}
return CGSizeMake(kSCManagedCapturerNightVideoDefaultResActiveFormatWidth,
kSCManagedCapturerNightVideoDefaultResActiveFormatHeight);
}
- (instancetype)initWithDevice:(AVCaptureDevice *)device devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
SCTraceStart();
self = [super init];
if (self) {
_device = device;
_devicePosition = devicePosition;
if (SCCameraTweaksEnableFaceDetectionFocus(devicePosition)) {
_exposureHandler = [[SCManagedCaptureDeviceFaceDetectionAutoExposureHandler alloc]
initWithDevice:device
pointOfInterest:CGPointMake(0.5, 0.5)
managedCapturer:[SCManagedCapturer sharedInstance]];
_focusHandler = [[SCManagedCaptureDeviceFaceDetectionAutoFocusHandler alloc]
initWithDevice:device
pointOfInterest:CGPointMake(0.5, 0.5)
managedCapturer:[SCManagedCapturer sharedInstance]];
} else {
_exposureHandler = [[SCManagedCaptureDeviceAutoExposureHandler alloc] initWithDevice:device
pointOfInterest:CGPointMake(0.5, 0.5)];
_focusHandler = [[SCManagedCaptureDeviceAutoFocusHandler alloc] initWithDevice:device
pointOfInterest:CGPointMake(0.5, 0.5)];
}
_observeController = [[FBKVOController alloc] initWithObserver:self];
[self _setAsExposureListenerForDevice:device];
if (SCCameraTweaksEnableExposurePointObservation()) {
[self _observeExposurePointForDevice:device];
}
if (SCCameraTweaksEnableFocusPointObservation()) {
[self _observeFocusPointForDevice:device];
}
_zoomFactor = 1.0;
[self _findSupportedFormats];
}
return self;
}
- (SCManagedCaptureDevicePosition)position
{
return _devicePosition;
}
#pragma mark - Setup and hook up with device
- (BOOL)setDeviceAsInput:(AVCaptureSession *)session
{
SCTraceStart();
AVCaptureDeviceInput *deviceInput = [self deviceInput];
if ([session canAddInput:deviceInput]) {
[session addInput:deviceInput];
} else {
NSString *previousSessionPreset = session.sessionPreset;
session.sessionPreset = AVCaptureSessionPresetInputPriority;
            // With the input-priority preset the session should now accept the input.
if ([session canAddInput:deviceInput]) {
[session addInput:deviceInput];
} else {
session.sessionPreset = previousSessionPreset;
return NO;
}
}
[self _enableSubjectAreaChangeMonitoring];
[self _updateActiveFormatWithSession:session fallbackPreset:AVCaptureSessionPreset640x480];
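    // If the active format cannot zoom past 1x, fall back to software zoom (the preview layer is scaled instead).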
if (_device.activeFormat.videoMaxZoomFactor < 1 + 1e-5) {
_softwareZoom = YES;
} else {
_softwareZoom = NO;
if (_device.videoZoomFactor != _zoomFactor) {
// Reset the zoom factor
[self setZoomFactor:_zoomFactor];
}
}
[_exposureHandler setVisible:YES];
[_focusHandler setVisible:YES];
_isConnected = YES;
return YES;
}
- (void)removeDeviceAsInput:(AVCaptureSession *)session
{
SCTraceStart();
if (_isConnected) {
[session removeInput:_deviceInput];
[_exposureHandler setVisible:NO];
[_focusHandler setVisible:NO];
_isConnected = NO;
}
}
- (void)resetDeviceAsInput
{
_deviceInput = nil;
AVCaptureDevice *deviceFound;
switch (_devicePosition) {
case SCManagedCaptureDevicePositionFront:
deviceFound = [[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionFront];
break;
case SCManagedCaptureDevicePositionBack:
deviceFound = [[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionBack];
break;
case SCManagedCaptureDevicePositionBackDualCamera:
deviceFound = [[SCCaptureDeviceResolver sharedInstance] findDualCamera];
break;
}
if (deviceFound) {
_device = deviceFound;
}
}
#pragma mark - Configurations
- (void)_findSupportedFormats
{
NSInteger defaultHeight = [SCManagedCaptureDevice defaultActiveFormatResolution].height;
NSInteger nightHeight = [SCManagedCaptureDevice nightModeActiveFormatResolution].height;
NSInteger liveVideoStreamingHeight = kSCManagedCapturerLiveStreamingVideoActiveFormatHeight;
NSArray *heights = @[ @(nightHeight), @(defaultHeight), @(liveVideoStreamingHeight) ];
BOOL formatsShouldSupportDepth = _devicePosition == SCManagedCaptureDevicePositionBackDualCamera;
NSDictionary *formats = SCBestHRSIFormatsForHeights(heights, _device.formats, formatsShouldSupportDepth);
_nightFormat = formats[@(nightHeight)];
_defaultFormat = formats[@(defaultHeight)];
_liveVideoStreamingFormat = formats[@(liveVideoStreamingHeight)];
}
- (AVCaptureDeviceFormat *)_bestSupportedFormat
{
if (_isNightModeActive) {
return _nightFormat;
}
if (_liveVideoStreamingActive) {
return _liveVideoStreamingFormat;
}
return _defaultFormat;
}
- (void)setNightModeActive:(BOOL)nightModeActive session:(AVCaptureSession *)session
{
SCTraceStart();
if (![SCManagedCaptureDevice isNightModeSupported]) {
return;
}
if (_isNightModeActive == nightModeActive) {
return;
}
_isNightModeActive = nightModeActive;
[self updateActiveFormatWithSession:session];
}
- (void)setLiveVideoStreaming:(BOOL)liveVideoStreaming session:(AVCaptureSession *)session
{
SCTraceStart();
if (_liveVideoStreamingActive == liveVideoStreaming) {
return;
}
_liveVideoStreamingActive = liveVideoStreaming;
[self updateActiveFormatWithSession:session];
}
- (void)setCaptureDepthData:(BOOL)captureDepthData session:(AVCaptureSession *)session
{
SCTraceStart();
_captureDepthData = captureDepthData;
[self _findSupportedFormats];
[self updateActiveFormatWithSession:session];
}
- (void)updateActiveFormatWithSession:(AVCaptureSession *)session
{
[self _updateActiveFormatWithSession:session fallbackPreset:AVCaptureSessionPreset640x480];
if (_device.videoZoomFactor != _zoomFactor) {
[self setZoomFactor:_zoomFactor];
}
}
- (void)_updateActiveFormatWithSession:(AVCaptureSession *)session fallbackPreset:(NSString *)fallbackPreset
{
AVCaptureDeviceFormat *nextFormat = [self _bestSupportedFormat];
if (nextFormat && [session canSetSessionPreset:AVCaptureSessionPresetInputPriority]) {
session.sessionPreset = AVCaptureSessionPresetInputPriority;
if (nextFormat == _device.activeFormat) {
            // The active format is unchanged, but the frame rate still needs to be reconfigured.
[_device runTask:@"update frame rate"
withLockedConfiguration:^() {
[self _updateDeviceFrameRate];
}];
} else {
[_device runTask:@"update active format"
withLockedConfiguration:^() {
_device.activeFormat = nextFormat;
[self _updateDeviceFrameRate];
}];
}
} else {
session.sessionPreset = fallbackPreset;
}
[self _updateFieldOfView];
}
- (void)_updateDeviceFrameRate
{
int32_t deviceFrameRate;
if (_liveVideoStreamingActive) {
deviceFrameRate = kSCManagedCaptureDeviceMaximumLowFrameRate;
} else {
deviceFrameRate = kSCManagedCaptureDeviceMaximumHighFrameRate;
}
CMTime frameDuration = CMTimeMake(1, deviceFrameRate);
    if (@available(iOS 11.0, *)) {
if (_captureDepthData) {
// Sync the video frame rate to the max depth frame rate (24 fps)
if (_device.activeDepthDataFormat.videoSupportedFrameRateRanges.firstObject) {
frameDuration =
_device.activeDepthDataFormat.videoSupportedFrameRateRanges.firstObject.minFrameDuration;
}
}
}
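    // Pin the frame rate by setting both the min and max frame durations to the same value.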
_device.activeVideoMaxFrameDuration = frameDuration;
_device.activeVideoMinFrameDuration = frameDuration;
if (_device.lowLightBoostSupported) {
_device.automaticallyEnablesLowLightBoostWhenAvailable = YES;
}
}
- (void)setZoomFactor:(float)zoomFactor
{
SCTraceStart();
if (_softwareZoom) {
// Just remember the software zoom scale
if (zoomFactor <= kSCManagedCaptureDevicecSoftwareMaxZoomFactor && zoomFactor >= 1) {
_zoomFactor = zoomFactor;
}
} else {
[_device runTask:@"set zoom factor"
withLockedConfiguration:^() {
if (zoomFactor <= _device.activeFormat.videoMaxZoomFactor && zoomFactor >= 1) {
_zoomFactor = zoomFactor;
if (_device.videoZoomFactor != _zoomFactor) {
_device.videoZoomFactor = _zoomFactor;
}
}
}];
}
[self _updateFieldOfView];
}
- (void)_updateFieldOfView
{
float fieldOfView = _device.activeFormat.videoFieldOfView;
if (_zoomFactor > 1.f) {
// Adjust the field of view to take the zoom factor into account.
// Note: this assumes the zoom factor linearly affects the focal length.
fieldOfView = 2.f * SCRadiansToDegrees(atanf(tanf(SCDegreesToRadians(0.5f * fieldOfView)) / _zoomFactor));
}
self.fieldOfView = fieldOfView;
}
- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser
{
[_exposureHandler setExposurePointOfInterest:pointOfInterest fromUser:fromUser];
}
// called when user taps on a point on screen, to re-adjust camera focus onto that tapped spot.
// this re-adjustment is always necessary, regardless of scenarios (recording video, taking photo, etc),
// therefore we don't have to check _focusLock in this method.
- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest
{
SCTraceStart();
[_focusHandler setAutofocusPointOfInterest:pointOfInterest];
}
- (void)continuousAutofocus
{
SCTraceStart();
[_focusHandler continuousAutofocus];
}
- (void)setRecording:(BOOL)recording
{
if (SCCameraTweaksSmoothAutoFocusWhileRecording() && [_device isSmoothAutoFocusSupported]) {
[self _setSmoothFocus:recording];
} else {
[self _setFocusLock:recording];
}
[_exposureHandler setStableExposure:recording];
}
- (void)_setFocusLock:(BOOL)focusLock
{
SCTraceStart();
[_focusHandler setFocusLock:focusLock];
}
- (void)_setSmoothFocus:(BOOL)smoothFocus
{
SCTraceStart();
[_focusHandler setSmoothFocus:smoothFocus];
}
- (void)setFlashActive:(BOOL)flashActive
{
SCTraceStart();
if (_flashActive != flashActive) {
if ([_device hasFlash]) {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
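            // AVCaptureDevice.flashMode is deprecated in favor of AVCapturePhotoSettings; this path still
            // drives the legacy API, so the warning is silenced.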
if (flashActive && [_device isFlashModeSupported:AVCaptureFlashModeOn]) {
[_device runTask:@"set flash active"
withLockedConfiguration:^() {
_device.flashMode = AVCaptureFlashModeOn;
}];
} else if (!flashActive && [_device isFlashModeSupported:AVCaptureFlashModeOff]) {
[_device runTask:@"set flash off"
withLockedConfiguration:^() {
_device.flashMode = AVCaptureFlashModeOff;
}];
}
#pragma clang diagnostic pop
_flashActive = flashActive;
} else {
_flashActive = NO;
}
}
}
- (void)setTorchActive:(BOOL)torchActive
{
SCTraceStart();
if (_torchActive != torchActive) {
if ([_device hasTorch]) {
if (torchActive && [_device isTorchModeSupported:AVCaptureTorchModeOn]) {
[_device runTask:@"set torch active"
withLockedConfiguration:^() {
[_device setTorchMode:AVCaptureTorchModeOn];
}];
} else if (!torchActive && [_device isTorchModeSupported:AVCaptureTorchModeOff]) {
[_device runTask:@"set torch off"
withLockedConfiguration:^() {
_device.torchMode = AVCaptureTorchModeOff;
}];
}
_torchActive = torchActive;
} else {
_torchActive = NO;
}
}
}
#pragma mark - Utilities
- (BOOL)isFlashSupported
{
return _device.hasFlash;
}
- (BOOL)isTorchSupported
{
return _device.hasTorch;
}
- (CGPoint)convertViewCoordinates:(CGPoint)viewCoordinates
viewSize:(CGSize)viewSize
videoGravity:(NSString *)videoGravity
{
SCTraceStart();
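    // Convert a tap location in view coordinates into the device's normalized point-of-interest space,
    // accounting for the preview's video gravity (resize / aspect / aspect-fill).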
CGPoint pointOfInterest = CGPointMake(.5f, .5f);
CGRect cleanAperture;
AVCaptureDeviceInput *deviceInput = [self deviceInput];
NSArray *ports = [deviceInput.ports copy];
if ([videoGravity isEqualToString:AVLayerVideoGravityResize]) {
// Scale, switch x and y, and reverse x
return CGPointMake(viewCoordinates.y / viewSize.height, 1.f - (viewCoordinates.x / viewSize.width));
}
for (AVCaptureInputPort *port in ports) {
        if ([[port mediaType] isEqualToString:AVMediaTypeVideo] && port.formatDescription) {
cleanAperture = CMVideoFormatDescriptionGetCleanAperture(port.formatDescription, YES);
CGSize apertureSize = cleanAperture.size;
CGPoint point = viewCoordinates;
CGFloat apertureRatio = apertureSize.height / apertureSize.width;
CGFloat viewRatio = viewSize.width / viewSize.height;
CGFloat xc = .5f;
CGFloat yc = .5f;
if ([videoGravity isEqualToString:AVLayerVideoGravityResizeAspect]) {
if (viewRatio > apertureRatio) {
CGFloat y2 = viewSize.height;
CGFloat x2 = viewSize.height * apertureRatio;
CGFloat x1 = viewSize.width;
CGFloat blackBar = (x1 - x2) / 2;
// If point is inside letterboxed area, do coordinate conversion; otherwise, don't change the
// default value returned (.5,.5)
if (point.x >= blackBar && point.x <= blackBar + x2) {
// Scale (accounting for the letterboxing on the left and right of the video preview),
// switch x and y, and reverse x
xc = point.y / y2;
yc = 1.f - ((point.x - blackBar) / x2);
}
} else {
CGFloat y2 = viewSize.width / apertureRatio;
CGFloat y1 = viewSize.height;
CGFloat x2 = viewSize.width;
CGFloat blackBar = (y1 - y2) / 2;
// If point is inside letterboxed area, do coordinate conversion. Otherwise, don't change the
// default value returned (.5,.5)
if (point.y >= blackBar && point.y <= blackBar + y2) {
// Scale (accounting for the letterboxing on the top and bottom of the video preview),
// switch x and y, and reverse x
xc = ((point.y - blackBar) / y2);
yc = 1.f - (point.x / x2);
}
}
} else if ([videoGravity isEqualToString:AVLayerVideoGravityResizeAspectFill]) {
// Scale, switch x and y, and reverse x
if (viewRatio > apertureRatio) {
CGFloat y2 = apertureSize.width * (viewSize.width / apertureSize.height);
xc = (point.y + ((y2 - viewSize.height) / 2.f)) / y2; // Account for cropped height
yc = (viewSize.width - point.x) / viewSize.width;
} else {
CGFloat x2 = apertureSize.height * (viewSize.height / apertureSize.width);
yc = 1.f - ((point.x + ((x2 - viewSize.width) / 2)) / x2); // Account for cropped width
xc = point.y / viewSize.height;
}
}
pointOfInterest = CGPointMake(xc, yc);
break;
}
}
return pointOfInterest;
}
#pragma mark - SCManagedCapturer friendly methods
- (AVCaptureDevice *)device
{
return _device;
}
- (AVCaptureDeviceInput *)deviceInput
{
SCTraceStart();
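    // Lazily create the device input; any creation error is kept and surfaced through -error.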
if (!_deviceInput) {
NSError *error = nil;
_deviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:_device error:&error];
if (!_deviceInput) {
_error = [error copy];
}
}
return _deviceInput;
}
- (NSError *)error
{
return _error;
}
- (BOOL)softwareZoom
{
return _softwareZoom;
}
- (BOOL)isConnected
{
return _isConnected;
}
- (BOOL)flashActive
{
return _flashActive;
}
- (BOOL)torchActive
{
return _torchActive;
}
- (float)zoomFactor
{
return _zoomFactor;
}
- (BOOL)isNightModeActive
{
return _isNightModeActive;
}
- (BOOL)liveVideoStreamingActive
{
return _liveVideoStreamingActive;
}
- (BOOL)isAvailable
{
return [_device isConnected];
}
#pragma mark - Private methods
- (void)_enableSubjectAreaChangeMonitoring
{
SCTraceStart();
[_device runTask:@"enable SubjectAreaChangeMonitoring"
withLockedConfiguration:^() {
_device.subjectAreaChangeMonitoringEnabled = YES;
}];
}
- (AVCaptureDeviceFormat *)activeFormat
{
return _device.activeFormat;
}
#pragma mark - Observe -adjustingExposure
- (void)_setAsExposureListenerForDevice:(AVCaptureDevice *)device
{
SCTraceStart();
SCLogCoreCameraInfo(@"Set exposure adjustment KVO for device: %ld", (long)device.position);
[_observeController observe:device
keyPath:@keypath(device, adjustingExposure)
options:NSKeyValueObservingOptionNew
action:@selector(_adjustingExposureChanged:)];
}
- (void)_adjustingExposureChanged:(NSDictionary *)change
{
SCTraceStart();
BOOL adjustingExposure = [change[NSKeyValueChangeNewKey] boolValue];
SCLogCoreCameraInfo(@"KVO exposure changed to %d", adjustingExposure);
if ([self.delegate respondsToSelector:@selector(managedCaptureDevice:didChangeAdjustingExposure:)]) {
[self.delegate managedCaptureDevice:self didChangeAdjustingExposure:adjustingExposure];
}
}
#pragma mark - Observe -exposurePointOfInterest
- (void)_observeExposurePointForDevice:(AVCaptureDevice *)device
{
SCTraceStart();
SCLogCoreCameraInfo(@"Set exposure point KVO for device: %ld", (long)device.position);
[_observeController observe:device
keyPath:@keypath(device, exposurePointOfInterest)
options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
action:@selector(_exposurePointOfInterestChanged:)];
}
- (void)_exposurePointOfInterestChanged:(NSDictionary *)change
{
SCTraceStart();
CGPoint exposurePoint = [change[NSKeyValueChangeNewKey] CGPointValue];
SCLogCoreCameraInfo(@"KVO exposure point changed to %@", NSStringFromCGPoint(exposurePoint));
if ([self.delegate respondsToSelector:@selector(managedCaptureDevice:didChangeExposurePoint:)]) {
[self.delegate managedCaptureDevice:self didChangeExposurePoint:exposurePoint];
}
}
#pragma mark - Observe -focusPointOfInterest
- (void)_observeFocusPointForDevice:(AVCaptureDevice *)device
{
SCTraceStart();
SCLogCoreCameraInfo(@"Set focus point KVO for device: %ld", (long)device.position);
[_observeController observe:device
keyPath:@keypath(device, focusPointOfInterest)
options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
action:@selector(_focusPointOfInterestChanged:)];
}
- (void)_focusPointOfInterestChanged:(NSDictionary *)change
{
SCTraceStart();
CGPoint focusPoint = [change[NSKeyValueChangeNewKey] CGPointValue];
SCLogCoreCameraInfo(@"KVO focus point changed to %@", NSStringFromCGPoint(focusPoint));
if ([self.delegate respondsToSelector:@selector(managedCaptureDevice:didChangeFocusPoint:)]) {
[self.delegate managedCaptureDevice:self didChangeFocusPoint:focusPoint];
}
}
- (void)dealloc
{
[_observeController unobserveAll];
}
@end

View File

@ -0,0 +1,17 @@
//
// SCManagedCaptureDeviceAutoExposureHandler.h
// Snapchat
//
// Created by Derek Peirce on 3/21/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceExposureHandler.h"
#import <AVFoundation/AVFoundation.h>
@interface SCManagedCaptureDeviceAutoExposureHandler : NSObject <SCManagedCaptureDeviceExposureHandler>
- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest;
@end

View File

@ -0,0 +1,63 @@
//
// SCManagedCaptureDeviceAutoExposureHandler.m
// Snapchat
//
// Created by Derek Peirce on 3/21/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceAutoExposureHandler.h"
#import "AVCaptureDevice+ConfigurationLock.h"
#import "SCManagedCaptureDeviceExposureHandler.h"
#import <SCFoundation/SCTrace.h>
@import AVFoundation;
@implementation SCManagedCaptureDeviceAutoExposureHandler {
CGPoint _exposurePointOfInterest;
AVCaptureDevice *_device;
}
- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest
{
if (self = [super init]) {
_device = device;
_exposurePointOfInterest = pointOfInterest;
}
return self;
}
- (CGPoint)getExposurePointOfInterest
{
return _exposurePointOfInterest;
}
- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser
{
SCTraceStart();
if (!CGPointEqualToPoint(pointOfInterest, _exposurePointOfInterest)) {
if ([_device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure] &&
[_device isExposurePointOfInterestSupported]) {
[_device runTask:@"set exposure"
withLockedConfiguration:^() {
                           // Set the exposure point before changing the exposure mode;
                           // the order matters.
_device.exposurePointOfInterest = pointOfInterest;
_device.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
}];
}
_exposurePointOfInterest = pointOfInterest;
}
}
- (void)setStableExposure:(BOOL)stableExposure
{
}
- (void)setVisible:(BOOL)visible
{
}
@end

View File

@ -0,0 +1,18 @@
//
// SCManagedCaptureDeviceAutoFocusHandler.h
// Snapchat
//
// Created by Jiyang Zhu on 3/7/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
// This class is used to adjust focus related parameters of camera, including focus mode and focus point.
#import "SCManagedCaptureDeviceFocusHandler.h"
#import <AVFoundation/AVFoundation.h>
@interface SCManagedCaptureDeviceAutoFocusHandler : NSObject <SCManagedCaptureDeviceFocusHandler>
- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest;
@end

View File

@ -0,0 +1,131 @@
//
// SCManagedCaptureDeviceAutoFocusHandler.m
// Snapchat
//
// Created by Jiyang Zhu on 3/7/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceAutoFocusHandler.h"
#import "AVCaptureDevice+ConfigurationLock.h"
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@import CoreGraphics;
@interface SCManagedCaptureDeviceAutoFocusHandler ()
@property (nonatomic, assign) CGPoint focusPointOfInterest;
@property (nonatomic, strong) AVCaptureDevice *device;
@property (nonatomic, assign) BOOL isContinuousAutofocus;
@property (nonatomic, assign) BOOL isFocusLock;
@end
@implementation SCManagedCaptureDeviceAutoFocusHandler
- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest
{
if (self = [super init]) {
_device = device;
_focusPointOfInterest = pointOfInterest;
_isContinuousAutofocus = YES;
_isFocusLock = NO;
}
return self;
}
- (CGPoint)getFocusPointOfInterest
{
return self.focusPointOfInterest;
}
// called when user taps on a point on screen, to re-adjust camera focus onto that tapped spot.
// this re-adjustment is always necessary, regardless of scenarios (recording video, taking photo, etc),
// therefore we don't have to check self.isFocusLock in this method.
- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.focusPointOfInterest) || self.isContinuousAutofocus)
// Do the setup immediately if the focus lock is off.
if ([self.device isFocusModeSupported:AVCaptureFocusModeAutoFocus] &&
[self.device isFocusPointOfInterestSupported]) {
[self.device runTask:@"set autofocus"
withLockedConfiguration:^() {
                           // Set the focus point before changing the focus mode;
                           // the order matters.
self.device.focusPointOfInterest = pointOfInterest;
self.device.focusMode = AVCaptureFocusModeAutoFocus;
}];
}
self.focusPointOfInterest = pointOfInterest;
self.isContinuousAutofocus = NO;
}
- (void)continuousAutofocus
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(!self.isContinuousAutofocus);
if (!self.isFocusLock) {
// Do the setup immediately if the focus lock is off.
if ([self.device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus] &&
[self.device isFocusPointOfInterestSupported]) {
[self.device runTask:@"set continuous autofocus"
withLockedConfiguration:^() {
                           // Set the focus point before changing the focus mode;
                           // the order matters.
self.device.focusPointOfInterest = CGPointMake(0.5, 0.5);
self.device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
}];
}
}
self.focusPointOfInterest = CGPointMake(0.5, 0.5);
self.isContinuousAutofocus = YES;
}
- (void)setFocusLock:(BOOL)focusLock
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(self.isFocusLock != focusLock);
    // This is the legacy focus lock; it is only applied to the back camera.
if (focusLock) {
if ([self.device isFocusModeSupported:AVCaptureFocusModeLocked]) {
[self.device runTask:@"set focus lock on"
withLockedConfiguration:^() {
self.device.focusMode = AVCaptureFocusModeLocked;
}];
}
} else {
        // Restore the previous autofocus configuration.
if ([self.device isFocusModeSupported:(self.isContinuousAutofocus ? AVCaptureFocusModeContinuousAutoFocus
: AVCaptureFocusModeAutoFocus)] &&
[self.device isFocusPointOfInterestSupported]) {
[self.device runTask:@"set focus lock on"
withLockedConfiguration:^() {
self.device.focusPointOfInterest = self.focusPointOfInterest;
self.device.focusMode = self.isContinuousAutofocus ? AVCaptureFocusModeContinuousAutoFocus
: AVCaptureFocusModeAutoFocus;
}];
}
}
self.isFocusLock = focusLock;
}
- (void)setSmoothFocus:(BOOL)smoothFocus
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(smoothFocus != self.device.smoothAutoFocusEnabled);
[self.device runTask:@"set smooth autofocus"
withLockedConfiguration:^() {
[self.device setSmoothAutoFocusEnabled:smoothFocus];
}];
}
- (void)setVisible:(BOOL)visible
{
}
@end

View File

@ -0,0 +1,25 @@
//
// SCManagedCaptureDeviceDefaultZoomHandler.h
// Snapchat
//
// Created by Yu-Kuan Lai on 4/12/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import <SCBase/SCMacros.h>
#import <CoreGraphics/CoreGraphics.h>
#import <Foundation/Foundation.h>
@class SCManagedCaptureDevice;
@class SCCaptureResource;
@interface SCManagedCaptureDeviceDefaultZoomHandler : NSObject
SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;
- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately;
- (void)softwareZoomWithDevice:(SCManagedCaptureDevice *)device;
@end

View File

@ -0,0 +1,93 @@
//
// SCManagedCaptureDeviceDefaultZoomHandler.m
// Snapchat
//
// Created by Yu-Kuan Lai on 4/12/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceDefaultZoomHandler_Private.h"
#import "SCCaptureResource.h"
#import "SCManagedCaptureDevice+SCManagedCapturer.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerStateBuilder.h"
#import "SCMetalUtils.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@implementation SCManagedCaptureDeviceDefaultZoomHandler
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
self = [super init];
if (self) {
_captureResource = captureResource;
}
return self;
}
- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately
{
[self _setZoomFactor:zoomFactor forManagedCaptureDevice:device];
}
- (void)softwareZoomWithDevice:(SCManagedCaptureDevice *)device
{
SCTraceODPCompatibleStart(2);
SCAssert([_captureResource.queuePerformer isCurrentPerformer] ||
[[SCQueuePerformer mainQueuePerformer] isCurrentPerformer],
@"");
SCAssert(device.softwareZoom, @"Only do software zoom for software zoom device");
SC_GUARD_ELSE_RETURN(!SCDeviceSupportsMetal());
float zoomFactor = device.zoomFactor;
SCLogCapturerInfo(@"Adjusting software zoom factor to: %f", zoomFactor);
AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer;
[[SCQueuePerformer mainQueuePerformer] perform:^{
[CATransaction begin];
[CATransaction setDisableActions:YES];
        // Scaling the superlayer's transform is what actually produces the software zoom effect.
videoPreviewLayer.superlayer.affineTransform = CGAffineTransformMakeScale(zoomFactor, zoomFactor);
[CATransaction commit];
}];
}
- (void)_setZoomFactor:(CGFloat)zoomFactor forManagedCaptureDevice:(SCManagedCaptureDevice *)device
{
SCTraceODPCompatibleStart(2);
[_captureResource.queuePerformer perform:^{
SCTraceStart();
if (device) {
SCLogCapturerInfo(@"Set zoom factor: %f -> %f", _captureResource.state.zoomFactor, zoomFactor);
[device setZoomFactor:zoomFactor];
BOOL zoomFactorChanged = NO;
// If the device is our current device, send the notification, update the
// state.
if (device.isConnected && device == _captureResource.device) {
if (device.softwareZoom) {
[self softwareZoomWithDevice:device];
}
_captureResource.state = [[[SCManagedCapturerStateBuilder
withManagedCapturerState:_captureResource.state] setZoomFactor:zoomFactor] build];
zoomFactorChanged = YES;
}
SCManagedCapturerState *state = [_captureResource.state copy];
runOnMainThreadAsynchronously(^{
if (zoomFactorChanged) {
[_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
didChangeState:state];
[_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
didChangeZoomFactor:state];
}
});
}
}];
}
@end

View File

@ -0,0 +1,17 @@
//
// SCManagedCaptureDeviceDefaultZoomHandler_Private.h
// Snapchat
//
// Created by Joe Qiao on 04/01/2018.
//
#import "SCManagedCaptureDeviceDefaultZoomHandler.h"
@interface SCManagedCaptureDeviceDefaultZoomHandler ()
@property (nonatomic, weak) SCCaptureResource *captureResource;
@property (nonatomic, weak) SCManagedCaptureDevice *currentDevice;
- (void)_setZoomFactor:(CGFloat)zoomFactor forManagedCaptureDevice:(SCManagedCaptureDevice *)device;
@end

View File

@ -0,0 +1,22 @@
//
// SCManagedCaptureDeviceExposureHandler.h
// Snapchat
//
// Created by Derek Peirce on 3/21/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import <CoreGraphics/CoreGraphics.h>
#import <Foundation/Foundation.h>
@protocol SCManagedCaptureDeviceExposureHandler <NSObject>
- (CGPoint)getExposurePointOfInterest;
- (void)setStableExposure:(BOOL)stableExposure;
- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser;
- (void)setVisible:(BOOL)visible;
@end

View File

@ -0,0 +1,28 @@
//
// SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h
// Snapchat
//
// Created by Jiyang Zhu on 3/6/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
// This class is used to
// 1. adjust exposure related parameters of camera, including exposure mode and exposure point.
// 2. receive detected face bounds, and set exposure point to a preferred face if needed.
#import "SCManagedCaptureDeviceExposureHandler.h"
#import <SCBase/SCMacros.h>
#import <AVFoundation/AVFoundation.h>
@protocol SCCapturer;
@interface SCManagedCaptureDeviceFaceDetectionAutoExposureHandler : NSObject <SCManagedCaptureDeviceExposureHandler>
SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithDevice:(AVCaptureDevice *)device
pointOfInterest:(CGPoint)pointOfInterest
managedCapturer:(id<SCCapturer>)managedCapturer;
@end

View File

@ -0,0 +1,121 @@
//
// SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.m
// Snapchat
//
// Created by Jiyang Zhu on 3/6/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h"
#import "AVCaptureDevice+ConfigurationLock.h"
#import "SCCameraTweaks.h"
#import "SCManagedCaptureDeviceExposureHandler.h"
#import "SCManagedCaptureFaceDetectionAdjustingPOIResource.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerListener.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@import AVFoundation;
@interface SCManagedCaptureDeviceFaceDetectionAutoExposureHandler () <SCManagedCapturerListener>
@property (nonatomic, strong) AVCaptureDevice *device;
@property (nonatomic, weak) id<SCCapturer> managedCapturer;
@property (nonatomic, assign) CGPoint exposurePointOfInterest;
@property (nonatomic, assign) BOOL isVisible;
@property (nonatomic, copy) NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID;
@property (nonatomic, strong) SCManagedCaptureFaceDetectionAdjustingPOIResource *resource;
@end
@implementation SCManagedCaptureDeviceFaceDetectionAutoExposureHandler
- (instancetype)initWithDevice:(AVCaptureDevice *)device
pointOfInterest:(CGPoint)pointOfInterest
managedCapturer:(id<SCCapturer>)managedCapturer
{
if (self = [super init]) {
SCAssert(device, @"AVCaptureDevice should not be nil.");
SCAssert(managedCapturer, @"id<SCCapturer> should not be nil.");
_device = device;
_exposurePointOfInterest = pointOfInterest;
SCManagedCaptureDevicePosition position =
(device.position == AVCaptureDevicePositionFront ? SCManagedCaptureDevicePositionFront
: SCManagedCaptureDevicePositionBack);
_resource = [[SCManagedCaptureFaceDetectionAdjustingPOIResource alloc]
initWithDefaultPointOfInterest:pointOfInterest
shouldTargetOnFaceAutomatically:SCCameraTweaksTurnOnFaceDetectionFocusByDefault(position)];
_managedCapturer = managedCapturer;
}
return self;
}
- (void)dealloc
{
[_managedCapturer removeListener:self];
}
- (CGPoint)getExposurePointOfInterest
{
return self.exposurePointOfInterest;
}
- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser
{
SCTraceODPCompatibleStart(2);
pointOfInterest = [self.resource updateWithNewProposedPointOfInterest:pointOfInterest fromUser:fromUser];
[self _actuallySetExposurePointOfInterestIfNeeded:pointOfInterest];
}
- (void)_actuallySetExposurePointOfInterestIfNeeded:(CGPoint)pointOfInterest
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.exposurePointOfInterest));
if ([self.device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure] &&
[self.device isExposurePointOfInterestSupported]) {
[self.device runTask:@"set exposure"
withLockedConfiguration:^() {
                           // Set the exposure point before changing the exposure mode;
                           // the order matters.
self.device.exposurePointOfInterest = pointOfInterest;
self.device.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
}];
}
self.exposurePointOfInterest = pointOfInterest;
}
- (void)setStableExposure:(BOOL)stableExposure
{
}
- (void)setVisible:(BOOL)visible
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(_isVisible != visible);
_isVisible = visible;
if (visible) {
[self.managedCapturer addListener:self];
} else {
[self.managedCapturer removeListener:self];
[self.resource reset];
}
}
#pragma mark - SCManagedCapturerListener
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didDetectFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(self.isVisible);
CGPoint pointOfInterest = [self.resource updateWithNewDetectedFaceBounds:faceBoundsByFaceID];
[self _actuallySetExposurePointOfInterestIfNeeded:pointOfInterest];
}
@end

View File

@ -0,0 +1,28 @@
//
// SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h
// Snapchat
//
// Created by Jiyang Zhu on 3/7/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
// This class is used to
// 1. adjust focus related parameters of camera, including focus mode and focus point.
// 2. receive detected face bounds, and focus to a preferred face if needed.
#import "SCManagedCaptureDeviceFocusHandler.h"
#import <SCBase/SCMacros.h>
#import <AVFoundation/AVFoundation.h>
@protocol SCCapturer;
@interface SCManagedCaptureDeviceFaceDetectionAutoFocusHandler : NSObject <SCManagedCaptureDeviceFocusHandler>
SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithDevice:(AVCaptureDevice *)device
pointOfInterest:(CGPoint)pointOfInterest
managedCapturer:(id<SCCapturer>)managedCapturer;
@end

View File

@ -0,0 +1,153 @@
//
// SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.m
// Snapchat
//
// Created by Jiyang Zhu on 3/7/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h"
#import "AVCaptureDevice+ConfigurationLock.h"
#import "SCCameraTweaks.h"
#import "SCManagedCaptureFaceDetectionAdjustingPOIResource.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerListener.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@interface SCManagedCaptureDeviceFaceDetectionAutoFocusHandler () <SCManagedCapturerListener>
@property (nonatomic, strong) AVCaptureDevice *device;
@property (nonatomic, weak) id<SCCapturer> managedCapturer;
@property (nonatomic, assign) CGPoint focusPointOfInterest;
@property (nonatomic, assign) BOOL isVisible;
@property (nonatomic, assign) BOOL isContinuousAutofocus;
@property (nonatomic, assign) BOOL focusLock;
@property (nonatomic, copy) NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID;
@property (nonatomic, strong) SCManagedCaptureFaceDetectionAdjustingPOIResource *resource;
@end
@implementation SCManagedCaptureDeviceFaceDetectionAutoFocusHandler
- (instancetype)initWithDevice:(AVCaptureDevice *)device
pointOfInterest:(CGPoint)pointOfInterest
managedCapturer:(id<SCCapturer>)managedCapturer
{
if (self = [super init]) {
SCAssert(device, @"AVCaptureDevice should not be nil.");
SCAssert(managedCapturer, @"id<SCCapturer> should not be nil.");
_device = device;
_focusPointOfInterest = pointOfInterest;
SCManagedCaptureDevicePosition position =
(device.position == AVCaptureDevicePositionFront ? SCManagedCaptureDevicePositionFront
: SCManagedCaptureDevicePositionBack);
_resource = [[SCManagedCaptureFaceDetectionAdjustingPOIResource alloc]
initWithDefaultPointOfInterest:pointOfInterest
shouldTargetOnFaceAutomatically:SCCameraTweaksTurnOnFaceDetectionFocusByDefault(position)];
_managedCapturer = managedCapturer;
}
return self;
}
- (CGPoint)getFocusPointOfInterest
{
return self.focusPointOfInterest;
}
// called when user taps on a point on screen, to re-adjust camera focus onto that tapped spot.
// this re-adjustment is always necessary, regardless of scenarios (recording video, taking photo, etc),
// therefore we don't have to check self.focusLock in this method.
- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest
{
SCTraceODPCompatibleStart(2);
pointOfInterest = [self.resource updateWithNewProposedPointOfInterest:pointOfInterest fromUser:YES];
SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.focusPointOfInterest) ||
self.isContinuousAutofocus);
[self _actuallySetFocusPointOfInterestIfNeeded:pointOfInterest
withFocusMode:AVCaptureFocusModeAutoFocus
taskName:@"set autofocus"];
}
- (void)continuousAutofocus
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(!self.isContinuousAutofocus);
CGPoint pointOfInterest = [self.resource updateWithNewProposedPointOfInterest:CGPointMake(0.5, 0.5) fromUser:NO];
[self _actuallySetFocusPointOfInterestIfNeeded:pointOfInterest
withFocusMode:AVCaptureFocusModeContinuousAutoFocus
taskName:@"set continuous autofocus"];
}
- (void)setFocusLock:(BOOL)focusLock
{
    // Focus lock is intentionally disabled when the face detection focus handler is in use.
}
- (void)setSmoothFocus:(BOOL)smoothFocus
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(smoothFocus != self.device.smoothAutoFocusEnabled);
[self.device runTask:@"set smooth autofocus"
withLockedConfiguration:^() {
[self.device setSmoothAutoFocusEnabled:smoothFocus];
}];
}
- (void)setVisible:(BOOL)visible
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(_isVisible != visible);
self.isVisible = visible;
if (visible) {
[[SCManagedCapturer sharedInstance] addListener:self];
} else {
[[SCManagedCapturer sharedInstance] removeListener:self];
[self.resource reset];
}
}
- (void)_actuallySetFocusPointOfInterestIfNeeded:(CGPoint)pointOfInterest
withFocusMode:(AVCaptureFocusMode)focusMode
taskName:(NSString *)taskName
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.focusPointOfInterest) &&
[self.device isFocusModeSupported:focusMode] && [self.device isFocusPointOfInterestSupported]);
[self.device runTask:taskName
withLockedConfiguration:^() {
                   // Set the focus point before changing the focus mode;
                   // the order matters.
self.device.focusPointOfInterest = pointOfInterest;
self.device.focusMode = focusMode;
}];
self.focusPointOfInterest = pointOfInterest;
self.isContinuousAutofocus = (focusMode == AVCaptureFocusModeContinuousAutoFocus);
}
#pragma mark - SCManagedCapturerListener
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didDetectFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN(self.isVisible);
CGPoint pointOfInterest = [self.resource updateWithNewDetectedFaceBounds:faceBoundsByFaceID];
// If pointOfInterest is equal to CGPointMake(0.5, 0.5), it means no valid face is found, so that we should reset to
// AVCaptureFocusModeContinuousAutoFocus. Otherwise, focus on the point and set the mode as
// AVCaptureFocusModeAutoFocus.
// TODO(Jiyang): Refactor SCManagedCaptureFaceDetectionAdjustingPOIResource to include focusMode and exposureMode.
AVCaptureFocusMode focusMode = CGPointEqualToPoint(pointOfInterest, CGPointMake(0.5, 0.5))
? AVCaptureFocusModeContinuousAutoFocus
: AVCaptureFocusModeAutoFocus;
[self _actuallySetFocusPointOfInterestIfNeeded:pointOfInterest
withFocusMode:focusMode
taskName:@"set autofocus from face detection"];
}
@end

View File

@ -0,0 +1,28 @@
//
// SCManagedCaptureDeviceFocusHandler.h
// Snapchat
//
// Created by Jiyang Zhu on 3/7/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import <CoreGraphics/CoreGraphics.h>
#import <Foundation/Foundation.h>
@protocol SCManagedCaptureDeviceFocusHandler <NSObject>
- (CGPoint)getFocusPointOfInterest;
/// Called when subject area changes.
- (void)continuousAutofocus;
/// Called when user taps.
- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest;
- (void)setSmoothFocus:(BOOL)smoothFocus;
- (void)setFocusLock:(BOOL)focusLock;
- (void)setVisible:(BOOL)visible;
@end

View File

@ -0,0 +1,23 @@
//
// SCManagedCaptureDeviceHandler.h
// Snapchat
//
// Created by Jiyang Zhu on 3/8/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDevice.h"
#import <SCBase/SCMacros.h>
#import <Foundation/Foundation.h>
@class SCCaptureResource;
@interface SCManagedCaptureDeviceHandler : NSObject <SCManagedCaptureDeviceDelegate>
SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;
@end

View File

@ -0,0 +1,77 @@
//
// SCManagedCaptureDeviceHandler.m
// Snapchat
//
// Created by Jiyang Zhu on 3/8/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceHandler.h"
#import "SCCaptureResource.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerState.h"
#import "SCManagedCapturerStateBuilder.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@interface SCManagedCaptureDeviceHandler ()
@property (nonatomic, weak) SCCaptureResource *captureResource;
@end
@implementation SCManagedCaptureDeviceHandler
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
self = [super init];
if (self) {
SCAssert(captureResource, @"SCCaptureResource should not be nil.");
_captureResource = captureResource;
}
return self;
}
- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeAdjustingExposure:(BOOL)adjustingExposure
{
SC_GUARD_ELSE_RETURN(device == _captureResource.device);
SCTraceODPCompatibleStart(2);
SCLogCapturerInfo(@"KVO Changes adjustingExposure %d", adjustingExposure);
[_captureResource.queuePerformer perform:^{
_captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
setAdjustingExposure:adjustingExposure] build];
SCManagedCapturerState *state = [_captureResource.state copy];
runOnMainThreadAsynchronously(^{
[_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];
[_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
didChangeAdjustingExposure:state];
});
}];
}
- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeExposurePoint:(CGPoint)exposurePoint
{
SC_GUARD_ELSE_RETURN(device == self.captureResource.device);
SCTraceODPCompatibleStart(2);
runOnMainThreadAsynchronously(^{
[self.captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
didChangeExposurePoint:exposurePoint];
});
}
- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeFocusPoint:(CGPoint)focusPoint
{
SC_GUARD_ELSE_RETURN(device == self.captureResource.device);
SCTraceODPCompatibleStart(2);
runOnMainThreadAsynchronously(^{
[self.captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
didChangeFocusPoint:focusPoint];
});
}
@end

View File

@@ -0,0 +1,12 @@
//
// SCManagedCaptureDeviceLinearInterpolationZoomHandler.h
// Snapchat
//
// Created by Joe Qiao on 03/01/2018.
//
#import "SCManagedCaptureDeviceDefaultZoomHandler.h"
@interface SCManagedCaptureDeviceLinearInterpolationZoomHandler : SCManagedCaptureDeviceDefaultZoomHandler
@end

View File

@@ -0,0 +1,190 @@
//
// SCManagedCaptureDeviceLinearInterpolationZoomHandler.m
// Snapchat
//
// Created by Joe Qiao on 03/01/2018.
//
#import "SCManagedCaptureDeviceLinearInterpolationZoomHandler.h"
#import "SCCameraTweaks.h"
#import "SCManagedCaptureDeviceDefaultZoomHandler_Private.h"
#import "SCManagedCapturerLogging.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCMathUtils.h>
@interface SCManagedCaptureDeviceLinearInterpolationZoomHandler ()
@property (nonatomic, strong) CADisplayLink *displayLink;
@property (nonatomic, assign) double timestamp;
@property (nonatomic, assign) float targetFactor;
@property (nonatomic, assign) float intermediateFactor;
@property (nonatomic, assign) int trend;
@property (nonatomic, assign) float stepLength;
@end
@implementation SCManagedCaptureDeviceLinearInterpolationZoomHandler
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
self = [super initWithCaptureResource:captureResource];
if (self) {
_timestamp = -1.0;
_targetFactor = 1.0;
_intermediateFactor = _targetFactor;
_trend = 1;
_stepLength = 0.0;
}
return self;
}
- (void)dealloc
{
[self _invalidate];
}
- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately
{
if (self.currentDevice != device) {
if (_displayLink) {
// If the device changed, interrupt the smoothing process
// and reset to the target zoom factor immediately.
[self _resetToZoomFactor:_targetFactor];
}
self.currentDevice = device;
immediately = YES;
}
if (immediately) {
[self _resetToZoomFactor:zoomFactor];
} else {
[self _addTargetZoomFactor:zoomFactor];
}
}
#pragma mark - Configurable
// smoothen if the update time interval is greater than the threshold
- (double)_thresholdTimeIntervalToSmoothen
{
return SCCameraTweaksSmoothZoomThresholdTime();
}
- (double)_thresholdFactorDiffToSmoothen
{
return SCCameraTweaksSmoothZoomThresholdFactor();
}
- (int)_intermediateFactorFramesPerSecond
{
return SCCameraTweaksSmoothZoomIntermediateFramesPerSecond();
}
- (double)_delayTolerantTime
{
return SCCameraTweaksSmoothZoomDelayTolerantTime();
}
// Minimum step length between two intermediate factors.
// The greater the better, as long as it still provides a smooth experience during the smoothing process.
- (float)_minimumStepLength
{
return SCCameraTweaksSmoothZoomMinStepLength();
}
#pragma mark - Private methods
- (void)_addTargetZoomFactor:(float)factor
{
SCAssertMainThread();
SCLogCapturerInfo(@"Smooth Zoom - [1] t=%f zf=%f", CACurrentMediaTime(), factor);
if (SCFloatEqual(factor, _targetFactor)) {
return;
}
_targetFactor = factor;
float diff = _targetFactor - _intermediateFactor;
if ([self _isDuringSmoothingProcess]) {
// during smoothing, only update data
[self _updateDataWithDiff:diff];
} else {
double curTimestamp = CACurrentMediaTime();
if (!SCFloatEqual(_timestamp, -1.0) && (curTimestamp - _timestamp) > [self _thresholdTimeIntervalToSmoothen] &&
ABS(diff) > [self _thresholdFactorDiffToSmoothen]) {
// need smoothing
[self _updateDataWithDiff:diff];
if ([self _nextStep]) {
// use a timer to interpolate intermediate factors to avoid a sharp jump
_displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(_nextStep)];
_displayLink.preferredFramesPerSecond = [self _intermediateFactorFramesPerSecond];
[_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
}
} else {
_timestamp = curTimestamp;
_intermediateFactor = factor;
SCLogCapturerInfo(@"Smooth Zoom - [2] t=%f zf=%f", CACurrentMediaTime(), _intermediateFactor);
[self _setZoomFactor:_intermediateFactor forManagedCaptureDevice:self.currentDevice];
}
}
}
- (void)_resetToZoomFactor:(float)factor
{
[self _invalidate];
_timestamp = -1.0;
_targetFactor = factor;
_intermediateFactor = _targetFactor;
[self _setZoomFactor:_intermediateFactor forManagedCaptureDevice:self.currentDevice];
}
- (BOOL)_nextStep
{
_timestamp = CACurrentMediaTime();
_intermediateFactor += (_trend * _stepLength);
BOOL hasNext = YES;
if (_trend < 0.0) {
_intermediateFactor = MAX(_intermediateFactor, _targetFactor);
} else {
_intermediateFactor = MIN(_intermediateFactor, _targetFactor);
}
SCLogCapturerInfo(@"Smooth Zoom - [3] t=%f zf=%f", CACurrentMediaTime(), _intermediateFactor);
[self _setZoomFactor:_intermediateFactor forManagedCaptureDevice:self.currentDevice];
if (SCFloatEqual(_intermediateFactor, _targetFactor)) {
// finish smoothing
[self _invalidate];
hasNext = NO;
}
return hasNext;
}
- (void)_invalidate
{
[_displayLink invalidate];
_displayLink = nil;
_trend = 1;
_stepLength = 0.0;
}
- (void)_updateDataWithDiff:(CGFloat)diff
{
_trend = diff < 0.0 ? -1 : 1;
_stepLength =
MAX(_stepLength, MAX([self _minimumStepLength],
ABS(diff) / ([self _delayTolerantTime] * [self _intermediateFactorFramesPerSecond])));
}
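// A rough worked example of the step-length formula above (numbers are illustrative, not the tweak defaults):
// with diff = 2.0, a delay-tolerant time of 0.2s and 30 intermediate frames per second, the step becomes
// ABS(diff) / (0.2 * 30) = 2.0 / 6, roughly 0.33 per frame, so the gap closes within the delay-tolerant window.
// The step is clamped from below by the minimum step length and never shrinks while a smoothing pass is in flight.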
- (BOOL)_isDuringSmoothingProcess
{
return (_displayLink ? YES : NO);
}
@end

View File

@@ -0,0 +1,20 @@
//
// SCManagedCaptureDeviceLockOnRecordExposureHandler.h
// Snapchat
//
// Created by Derek Peirce on 3/24/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceExposureHandler.h"
#import <AVFoundation/AVFoundation.h>
// An exposure handler that prevents any changes in exposure as soon as recording begins
@interface SCManagedCaptureDeviceLockOnRecordExposureHandler : NSObject <SCManagedCaptureDeviceExposureHandler>
- (instancetype)initWithDevice:(AVCaptureDevice *)device
pointOfInterest:(CGPoint)pointOfInterest
allowTap:(BOOL)allowTap;
@end
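// A minimal usage sketch (hypothetical call sites, assuming the capturer owns the handler): exposure follows
// continuous auto-exposure while idle and is pinned for the duration of a recording:
//
//   SCManagedCaptureDeviceLockOnRecordExposureHandler *exposureHandler =
//       [[SCManagedCaptureDeviceLockOnRecordExposureHandler alloc] initWithDevice:device
//                                                                  pointOfInterest:CGPointMake(0.5, 0.5)
//                                                                         allowTap:YES];
//   [exposureHandler setStableExposure:YES]; // recording started: lock exposure
//   [exposureHandler setStableExposure:NO];  // recording ended: back to continuous auto-exposure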

View File

@@ -0,0 +1,90 @@
//
// SCManagedCaptureDeviceLockOnRecordExposureHandler.m
// Snapchat
//
// Created by Derek Peirce on 3/24/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceLockOnRecordExposureHandler.h"
#import "AVCaptureDevice+ConfigurationLock.h"
#import "SCExposureState.h"
#import "SCManagedCaptureDeviceExposureHandler.h"
#import <SCFoundation/SCTrace.h>
@import AVFoundation;
@implementation SCManagedCaptureDeviceLockOnRecordExposureHandler {
CGPoint _exposurePointOfInterest;
AVCaptureDevice *_device;
// allows the exposure to change when the user taps to refocus
BOOL _allowTap;
SCExposureState *_exposureState;
}
- (instancetype)initWithDevice:(AVCaptureDevice *)device
pointOfInterest:(CGPoint)pointOfInterest
allowTap:(BOOL)allowTap
{
if (self = [super init]) {
_device = device;
_exposurePointOfInterest = pointOfInterest;
_allowTap = allowTap;
}
return self;
}
- (CGPoint)getExposurePointOfInterest
{
return _exposurePointOfInterest;
}
- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser
{
SCTraceStart();
BOOL locked = _device.exposureMode == AVCaptureExposureModeLocked ||
_device.exposureMode == AVCaptureExposureModeCustom ||
_device.exposureMode == AVCaptureExposureModeAutoExpose;
if (!locked || (fromUser && _allowTap)) {
AVCaptureExposureMode exposureMode =
(locked ? AVCaptureExposureModeAutoExpose : AVCaptureExposureModeContinuousAutoExposure);
if ([_device isExposureModeSupported:exposureMode] && [_device isExposurePointOfInterestSupported]) {
[_device runTask:@"set exposure point"
withLockedConfiguration:^() {
// Set the exposure point before changing the exposure mode;
// note that the order does matter
_device.exposurePointOfInterest = pointOfInterest;
_device.exposureMode = exposureMode;
}];
}
_exposurePointOfInterest = pointOfInterest;
}
}
- (void)setStableExposure:(BOOL)stableExposure
{
AVCaptureExposureMode exposureMode =
stableExposure ? AVCaptureExposureModeLocked : AVCaptureExposureModeContinuousAutoExposure;
if ([_device isExposureModeSupported:exposureMode]) {
[_device runTask:@"set stable exposure"
withLockedConfiguration:^() {
_device.exposureMode = exposureMode;
}];
}
}
- (void)setVisible:(BOOL)visible
{
if (visible) {
if (_device.exposureMode == AVCaptureExposureModeLocked ||
_device.exposureMode == AVCaptureExposureModeCustom) {
[_exposureState applyISOAndExposureDurationToDevice:_device];
}
} else {
_exposureState = [[SCExposureState alloc] initWithDevice:_device];
}
}
@end

View File

@@ -0,0 +1,13 @@
//
// SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h
// Snapchat
//
// Created by Yu-Kuan Lai on 4/12/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceDefaultZoomHandler.h"
@interface SCManagedCaptureDeviceSavitzkyGolayZoomHandler : SCManagedCaptureDeviceDefaultZoomHandler
@end

View File

@@ -0,0 +1,95 @@
//
// SCManagedCaptureDeviceSavitzkyGolayZoomHandler.m
// Snapchat
//
// Created by Yu-Kuan Lai on 4/12/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
// https://en.wikipedia.org/wiki/Savitzky%E2%80%93Golay_filter
//
#import "SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h"
#import "SCManagedCaptureDevice.h"
#import "SCManagedCaptureDeviceDefaultZoomHandler_Private.h"
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTraceODPCompatible.h>
static NSUInteger const kSCSavitzkyGolayWindowSize = 9;
static CGFloat const kSCUpperSharpZoomThreshold = 1.15;
@interface SCManagedCaptureDeviceSavitzkyGolayZoomHandler ()
@property (nonatomic, strong) NSMutableArray *zoomFactorHistoryArray;
@end
@implementation SCManagedCaptureDeviceSavitzkyGolayZoomHandler
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
self = [super initWithCaptureResource:captureResource];
if (self) {
_zoomFactorHistoryArray = [[NSMutableArray alloc] init];
}
return self;
}
- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately
{
if (self.currentDevice != device) {
// reset if device changed
self.currentDevice = device;
[self _resetZoomFactor:zoomFactor forDevice:self.currentDevice];
return;
}
if (immediately || zoomFactor == 1 || _zoomFactorHistoryArray.count == 0) {
// reset if zoomFactor is 1 or this is the first data point
[self _resetZoomFactor:zoomFactor forDevice:device];
return;
}
CGFloat lastVal = [[_zoomFactorHistoryArray lastObject] floatValue];
CGFloat upperThreshold = lastVal * kSCUpperSharpZoomThreshold;
if (zoomFactor > upperThreshold) {
// sharp change in zoomFactor, reset
[self _resetZoomFactor:zoomFactor forDevice:device];
return;
}
[_zoomFactorHistoryArray addObject:@(zoomFactor)];
if ([_zoomFactorHistoryArray count] > kSCSavitzkyGolayWindowSize) {
[_zoomFactorHistoryArray removeObjectAtIndex:0];
}
float filteredZoomFactor =
SC_CLAMP([self _savitzkyGolayFilteredZoomFactor], kSCMinVideoZoomFactor, kSCMaxVideoZoomFactor);
[self _setZoomFactor:filteredZoomFactor forManagedCaptureDevice:device];
}
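// The weights in _savitzkyGolayFilteredZoomFactor below are the standard Savitzky-Golay smoothing coefficients
// for a 9-point window with a quadratic/cubic fit: (-21, 14, 39, 54, 59, 54, 39, 14, -21) / 231, evaluated at
// the center of the window (index 4).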
- (CGFloat)_savitzkyGolayFilteredZoomFactor
{
if ([_zoomFactorHistoryArray count] == kSCSavitzkyGolayWindowSize) {
CGFloat filteredZoomFactor =
59 * [_zoomFactorHistoryArray[4] floatValue] +
54 * ([_zoomFactorHistoryArray[3] floatValue] + [_zoomFactorHistoryArray[5] floatValue]) +
39 * ([_zoomFactorHistoryArray[2] floatValue] + [_zoomFactorHistoryArray[6] floatValue]) +
14 * ([_zoomFactorHistoryArray[1] floatValue] + [_zoomFactorHistoryArray[7] floatValue]) -
21 * ([_zoomFactorHistoryArray[0] floatValue] + [_zoomFactorHistoryArray[8] floatValue]);
filteredZoomFactor /= 231;
return filteredZoomFactor;
} else {
return [[_zoomFactorHistoryArray lastObject] floatValue]; // use the latest zoomFactor directly if we have fewer than 9 samples
}
}
- (void)_resetZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device
{
[_zoomFactorHistoryArray removeAllObjects];
[_zoomFactorHistoryArray addObject:@(zoomFactor)];
[self _setZoomFactor:zoomFactor forManagedCaptureDevice:device];
}
@end

View File

@@ -0,0 +1,23 @@
//
// SCManagedCaptureDeviceSubjectAreaHandler.h
// Snapchat
//
// Created by Xiaokang Liu on 19/03/2018.
//
// This class handles the AVCaptureDeviceSubjectAreaDidChangeNotification notification for SCManagedCapturer.
// It resets the device's focus and exposure settings when the subject area changes.
#import <SCBase/SCMacros.h>
#import <Foundation/Foundation.h>
@class SCCaptureResource;
@protocol SCCapturer;
@interface SCManagedCaptureDeviceSubjectAreaHandler : NSObject
SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource NS_DESIGNATED_INITIALIZER;
- (void)stopObserving;
- (void)startObserving;
@end
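// A minimal usage sketch (hypothetical call sites, assuming the capturer owns the handler): observation is
// typically tied to the capture session lifecycle so the reset only runs while the session is live:
//
//   SCManagedCaptureDeviceSubjectAreaHandler *subjectAreaHandler =
//       [[SCManagedCaptureDeviceSubjectAreaHandler alloc] initWithCaptureResource:captureResource];
//   [subjectAreaHandler startObserving]; // when the capture session starts running
//   [subjectAreaHandler stopObserving];  // when the capture session stops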

View File

@@ -0,0 +1,67 @@
//
// SCManagedCaptureDeviceSubjectAreaHandler.m
// Snapchat
//
// Created by Xiaokang Liu on 19/03/2018.
//
#import "SCManagedCaptureDeviceSubjectAreaHandler.h"
#import "SCCameraTweaks.h"
#import "SCCaptureResource.h"
#import "SCCaptureWorker.h"
#import "SCManagedCaptureDevice+SCManagedCapturer.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerState.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
@interface SCManagedCaptureDeviceSubjectAreaHandler () {
__weak SCCaptureResource *_captureResource;
}
@end
@implementation SCManagedCaptureDeviceSubjectAreaHandler
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
self = [super init];
if (self) {
SCAssert(captureResource, @"");
_captureResource = captureResource;
}
return self;
}
- (void)stopObserving
{
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVCaptureDeviceSubjectAreaDidChangeNotification
object:nil];
}
- (void)startObserving
{
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(_subjectAreaDidChange:)
name:AVCaptureDeviceSubjectAreaDidChangeNotification
object:nil];
}
#pragma mark - Private methods
- (void)_subjectAreaDidChange:(NSDictionary *)notification
{
[_captureResource.queuePerformer perform:^{
if (_captureResource.device.isConnected && !_captureResource.state.arSessionActive) {
// Reset to continuous autofocus when the subject area changed
[_captureResource.device continuousAutofocus];
[_captureResource.device setExposurePointOfInterest:CGPointMake(0.5, 0.5) fromUser:NO];
if (SCCameraTweaksEnablePortraitModeAutofocus()) {
[SCCaptureWorker setPortraitModePointOfInterestAsynchronously:CGPointMake(0.5, 0.5)
completionHandler:nil
resource:_captureResource];
}
}
}];
}
@end

View File

@@ -0,0 +1,19 @@
//
// SCManagedCaptureDeviceThresholdExposureHandler.h
// Snapchat
//
// Created by Derek Peirce on 4/11/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceExposureHandler.h"
#import <AVFoundation/AVFoundation.h>
@interface SCManagedCaptureDeviceThresholdExposureHandler : NSObject <SCManagedCaptureDeviceExposureHandler>
- (instancetype)initWithDevice:(AVCaptureDevice *)device
pointOfInterest:(CGPoint)pointOfInterest
threshold:(CGFloat)threshold;
@end
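// Rough semantics, as implemented in the .m that follows: once exposure is stabilized (setStableExposure:YES),
// the handler pins ISO and exposure duration in custom mode and observes exposureTargetOffset via KVO; when the
// offset drifts past the given threshold it runs a one-shot auto-expose at the frame center, then re-locks the
// new ISO and duration. A construction sketch (the threshold value is illustrative):
//
//   SCManagedCaptureDeviceThresholdExposureHandler *exposureHandler =
//       [[SCManagedCaptureDeviceThresholdExposureHandler alloc] initWithDevice:device
//                                                                pointOfInterest:CGPointMake(0.5, 0.5)
//                                                                      threshold:0.5];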

View File

@@ -0,0 +1,133 @@
//
// SCManagedCaptureDeviceThresholdExposureHandler.m
// Snapchat
//
// Created by Derek Peirce on 4/11/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureDeviceThresholdExposureHandler.h"
#import "AVCaptureDevice+ConfigurationLock.h"
#import "SCCameraTweaks.h"
#import "SCExposureState.h"
#import "SCManagedCaptureDeviceExposureHandler.h"
#import <SCFoundation/SCTrace.h>
#import <FBKVOController/FBKVOController.h>
@import AVFoundation;
@implementation SCManagedCaptureDeviceThresholdExposureHandler {
AVCaptureDevice *_device;
CGPoint _exposurePointOfInterest;
CGFloat _threshold;
// allows the exposure to change when the user taps to refocus
SCExposureState *_exposureState;
FBKVOController *_kvoController;
}
- (instancetype)initWithDevice:(AVCaptureDevice *)device
pointOfInterest:(CGPoint)pointOfInterest
threshold:(CGFloat)threshold
{
if (self = [super init]) {
_device = device;
_exposurePointOfInterest = pointOfInterest;
_threshold = threshold;
_kvoController = [FBKVOController controllerWithObserver:self];
@weakify(self);
[_kvoController observe:device
keyPath:NSStringFromSelector(@selector(exposureMode))
options:NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew
block:^(id observer, id object, NSDictionary *change) {
@strongify(self);
AVCaptureExposureMode old =
(AVCaptureExposureMode)[(NSNumber *)change[NSKeyValueChangeOldKey] intValue];
AVCaptureExposureMode new =
(AVCaptureExposureMode)[(NSNumber *)change[NSKeyValueChangeNewKey] intValue];
if (old == AVCaptureExposureModeAutoExpose && new == AVCaptureExposureModeLocked) {
// auto expose is done, go back to custom
self->_exposureState = [[SCExposureState alloc] initWithDevice:self->_device];
[self->_exposureState applyISOAndExposureDurationToDevice:self->_device];
}
}];
[_kvoController observe:device
keyPath:NSStringFromSelector(@selector(exposureTargetOffset))
options:NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew
block:^(id observer, id object, NSDictionary *change) {
@strongify(self);
if (self->_device.exposureMode == AVCaptureExposureModeCustom) {
CGFloat offset = [(NSNumber *)change[NSKeyValueChangeOldKey] floatValue];
if (fabs(offset) > self->_threshold) {
[self->_device runTask:@"set exposure point"
withLockedConfiguration:^() {
// Set the exposure point before changing the exposure mode;
// note that the order does matter
self->_device.exposurePointOfInterest = CGPointMake(0.5, 0.5);
self->_device.exposureMode = AVCaptureExposureModeAutoExpose;
}];
}
}
}];
}
return self;
}
- (CGPoint)getExposurePointOfInterest
{
return _exposurePointOfInterest;
}
- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser
{
SCTraceStart();
BOOL locked = _device.exposureMode == AVCaptureExposureModeLocked ||
_device.exposureMode == AVCaptureExposureModeCustom ||
_device.exposureMode == AVCaptureExposureModeAutoExpose;
if (!locked || fromUser) {
AVCaptureExposureMode exposureMode =
(locked ? AVCaptureExposureModeAutoExpose : AVCaptureExposureModeContinuousAutoExposure);
if ([_device isExposureModeSupported:exposureMode] && [_device isExposurePointOfInterestSupported]) {
[_device runTask:@"set exposure point"
withLockedConfiguration:^() {
// Set the exposure point before changing the exposure mode;
// note that the order does matter
_device.exposurePointOfInterest = pointOfInterest;
_device.exposureMode = exposureMode;
}];
}
_exposurePointOfInterest = pointOfInterest;
}
}
- (void)setStableExposure:(BOOL)stableExposure
{
if (stableExposure) {
_exposureState = [[SCExposureState alloc] initWithDevice:_device];
[_exposureState applyISOAndExposureDurationToDevice:_device];
} else {
AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;
if ([_device isExposureModeSupported:exposureMode]) {
[_device runTask:@"set exposure point"
withLockedConfiguration:^() {
_device.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
}];
}
}
}
- (void)setVisible:(BOOL)visible
{
if (visible) {
if (_device.exposureMode == AVCaptureExposureModeLocked ||
_device.exposureMode == AVCaptureExposureModeCustom) {
[_exposureState applyISOAndExposureDurationToDevice:_device];
}
} else {
_exposureState = [[SCExposureState alloc] initWithDevice:_device];
}
}
@end

View File

@@ -0,0 +1,61 @@
//
// SCManagedCaptureFaceDetectionAdjustingPOIResource.h
// Snapchat
//
// Created by Jiyang Zhu on 3/7/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
// This class keeps several properties for face detection and focus/exposure. It provides methods that help
// FaceDetectionAutoFocusHandler and FaceDetectionAutoExposureHandler deal with point-of-interest setting events
// from user taps, subject area changes, and face detection, by updating itself and returning the actual point of
// interest.
#import <CoreGraphics/CoreGraphics.h>
#import <Foundation/Foundation.h>
typedef NS_ENUM(NSInteger, SCManagedCaptureFaceDetectionAdjustingPOIMode) {
SCManagedCaptureFaceDetectionAdjustingPOIModeNone = 0,
SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace,
SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace,
};
@interface SCManagedCaptureFaceDetectionAdjustingPOIResource : NSObject
@property (nonatomic, assign) CGPoint pointOfInterest;
@property (nonatomic, strong) NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID;
@property (nonatomic, assign) SCManagedCaptureFaceDetectionAdjustingPOIMode adjustingPOIMode;
@property (nonatomic, assign) BOOL shouldTargetOnFaceAutomatically;
@property (nonatomic, strong) NSNumber *targetingFaceID;
@property (nonatomic, assign) CGRect targetingFaceBounds;
- (instancetype)initWithDefaultPointOfInterest:(CGPoint)pointOfInterest
shouldTargetOnFaceAutomatically:(BOOL)shouldTargetOnFaceAutomatically;
- (void)reset;
/**
Update SCManagedCaptureFaceDetectionAdjustingPOIResource when a new POI adjustment comes in. It finds the face that
the proposedPoint belongs to and returns the center of that face, if the adjustingPOIMode and fromUser meet the
requirements.
@param proposedPoint
The point of interest that the upper level wants to set.
@param fromUser
Whether the setting comes from the user's tap or not.
@return
The actual point of interest that should be applied.
*/
- (CGPoint)updateWithNewProposedPointOfInterest:(CGPoint)proposedPoint fromUser:(BOOL)fromUser;
/**
Update SCManagedCaptureFaceDetectionAdjustingPOIResource when newly detected face bounds come in.
@param faceBoundsByFaceID
A dictionary. Key: FaceID as NSNumber. Value: FaceBounds as CGRect.
@return
The actual point of interest that should be applied.
*/
- (CGPoint)updateWithNewDetectedFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID;
@end
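// A minimal usage sketch (hypothetical call sites; in practice the face-detection focus/exposure handlers drive
// this object): user taps and detected faces both funnel through the resource, which decides the actual POI.
// `tapPoint` and `faceBoundsByFaceID` are assumed to come from the caller:
//
//   SCManagedCaptureFaceDetectionAdjustingPOIResource *resource = [[SCManagedCaptureFaceDetectionAdjustingPOIResource alloc]
//       initWithDefaultPointOfInterest:CGPointMake(0.5, 0.5)
//       shouldTargetOnFaceAutomatically:YES];
//   CGPoint poi = [resource updateWithNewProposedPointOfInterest:tapPoint fromUser:YES];
//   poi = [resource updateWithNewDetectedFaceBounds:faceBoundsByFaceID];
//   // `poi` is then forwarded to the device's focus/exposure point-of-interest setters.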

View File

@@ -0,0 +1,232 @@
//
// SCManagedCaptureFaceDetectionAdjustingPOIResource.m
// Snapchat
//
// Created by Jiyang Zhu on 3/7/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCaptureFaceDetectionAdjustingPOIResource.h"
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@implementation SCManagedCaptureFaceDetectionAdjustingPOIResource {
CGPoint _defaultPointOfInterest;
}
#pragma mark - Public Methods
- (instancetype)initWithDefaultPointOfInterest:(CGPoint)pointOfInterest
shouldTargetOnFaceAutomatically:(BOOL)shouldTargetOnFaceAutomatically
{
if (self = [super init]) {
_pointOfInterest = pointOfInterest;
_defaultPointOfInterest = pointOfInterest;
_shouldTargetOnFaceAutomatically = shouldTargetOnFaceAutomatically;
}
return self;
}
- (void)reset
{
SCTraceODPCompatibleStart(2);
self.adjustingPOIMode = SCManagedCaptureFaceDetectionAdjustingPOIModeNone;
self.targetingFaceID = nil;
self.targetingFaceBounds = CGRectZero;
self.faceBoundsByFaceID = nil;
self.pointOfInterest = _defaultPointOfInterest;
}
- (CGPoint)updateWithNewProposedPointOfInterest:(CGPoint)proposedPoint fromUser:(BOOL)fromUser
{
SCTraceODPCompatibleStart(2);
if (fromUser) {
NSNumber *faceID =
[self _getFaceIDOfFaceBoundsContainingPoint:proposedPoint fromFaceBounds:self.faceBoundsByFaceID];
if (faceID && [faceID integerValue] >= 0) {
CGPoint point = [self _getPointOfInterestWithFaceID:faceID fromFaceBounds:self.faceBoundsByFaceID];
if ([self _isPointOfInterestValid:point]) {
[self _setPointOfInterest:point
targetingFaceID:faceID
adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace];
} else {
[self _setPointOfInterest:proposedPoint
targetingFaceID:nil
adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace];
}
} else {
[self _setPointOfInterest:proposedPoint
targetingFaceID:nil
adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace];
}
} else {
[self _setPointOfInterest:proposedPoint
targetingFaceID:nil
adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeNone];
}
return self.pointOfInterest;
}
- (CGPoint)updateWithNewDetectedFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SCTraceODPCompatibleStart(2);
self.faceBoundsByFaceID = faceBoundsByFaceID;
switch (self.adjustingPOIMode) {
case SCManagedCaptureFaceDetectionAdjustingPOIModeNone: {
if (self.shouldTargetOnFaceAutomatically) {
[self _focusOnPreferredFaceInFaceBounds:self.faceBoundsByFaceID];
}
} break;
case SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace: {
BOOL isFocusingOnCurrentTargetingFaceSuccess =
[self _focusOnFaceWithTargetFaceID:self.targetingFaceID inFaceBounds:self.faceBoundsByFaceID];
if (!isFocusingOnCurrentTargetingFaceSuccess && self.shouldTargetOnFaceAutomatically) {
// If the targeted face has disappeared and shouldTargetOnFaceAutomatically is YES, automatically target
// the next preferred face.
[self _focusOnPreferredFaceInFaceBounds:self.faceBoundsByFaceID];
}
} break;
case SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace:
// The point of interest should stay fixed at the non-face point where the user tapped before.
break;
}
return self.pointOfInterest;
}
#pragma mark - Internal Methods
- (BOOL)_focusOnPreferredFaceInFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SCTraceODPCompatibleStart(2);
NSNumber *preferredFaceID = [self _getPreferredFaceIDFromFaceBounds:faceBoundsByFaceID];
return [self _focusOnFaceWithTargetFaceID:preferredFaceID inFaceBounds:faceBoundsByFaceID];
}
- (BOOL)_focusOnFaceWithTargetFaceID:(NSNumber *)preferredFaceID
inFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN_VALUE(preferredFaceID, NO);
NSValue *faceBoundsValue = [faceBoundsByFaceID objectForKey:preferredFaceID];
if (faceBoundsValue) {
CGRect faceBounds = [faceBoundsValue CGRectValue];
CGPoint proposedPoint = CGPointMake(CGRectGetMidX(faceBounds), CGRectGetMidY(faceBounds));
if ([self _isPointOfInterestValid:proposedPoint]) {
if ([self _shouldChangeToNewPoint:proposedPoint withNewFaceID:preferredFaceID newFaceBounds:faceBounds]) {
[self _setPointOfInterest:proposedPoint
targetingFaceID:preferredFaceID
adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace];
}
return YES;
}
}
[self reset];
return NO;
}
- (void)_setPointOfInterest:(CGPoint)pointOfInterest
targetingFaceID:(NSNumber *)targetingFaceID
adjustingPOIMode:(SCManagedCaptureFaceDetectionAdjustingPOIMode)adjustingPOIMode
{
SCTraceODPCompatibleStart(2);
self.pointOfInterest = pointOfInterest;
self.targetingFaceID = targetingFaceID;
if (targetingFaceID) { // If targetingFaceID exists, record the current face bounds.
self.targetingFaceBounds = [[self.faceBoundsByFaceID objectForKey:targetingFaceID] CGRectValue];
} else { // Otherwise, reset targetingFaceBounds to zero.
self.targetingFaceBounds = CGRectZero;
}
self.adjustingPOIMode = adjustingPOIMode;
}
- (BOOL)_isPointOfInterestValid:(CGPoint)pointOfInterest
{
return (pointOfInterest.x >= 0 && pointOfInterest.x <= 1 && pointOfInterest.y >= 0 && pointOfInterest.y <= 1);
}
- (NSNumber *)_getPreferredFaceIDFromFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN_VALUE(faceBoundsByFaceID.count > 0, nil);
// Find out the bounds with the max area.
__block NSNumber *preferredFaceID = nil;
__block CGFloat maxArea = 0;
[faceBoundsByFaceID
enumerateKeysAndObjectsUsingBlock:^(NSNumber *_Nonnull key, NSValue *_Nonnull obj, BOOL *_Nonnull stop) {
CGRect faceBounds = [obj CGRectValue];
CGFloat area = CGRectGetWidth(faceBounds) * CGRectGetHeight(faceBounds);
if (area > maxArea) {
preferredFaceID = key;
maxArea = area;
}
}];
return preferredFaceID;
}
- (CGPoint)_getPointOfInterestWithFaceID:(NSNumber *)faceID
fromFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SCTraceODPCompatibleStart(2);
NSValue *faceBoundsValue = [faceBoundsByFaceID objectForKey:faceID];
if (faceBoundsValue) {
CGRect faceBounds = [faceBoundsValue CGRectValue];
CGPoint point = CGPointMake(CGRectGetMidX(faceBounds), CGRectGetMidY(faceBounds));
return point;
} else {
return CGPointMake(-1, -1); // An invalid point.
}
}
/**
Setting a new focus/exposure point incurs high CPU usage, so we only set a new POI when we have to. This method
returns whether setting this new point is necessary.
If not, there is no need to change the POI.
*/
- (BOOL)_shouldChangeToNewPoint:(CGPoint)newPoint
withNewFaceID:(NSNumber *)newFaceID
newFaceBounds:(CGRect)newFaceBounds
{
SCTraceODPCompatibleStart(2);
BOOL shouldChange = NO;
if (!newFaceID || !self.targetingFaceID ||
![newFaceID isEqualToNumber:self.targetingFaceID]) { // Return YES if it is a new face.
shouldChange = YES;
} else if (CGRectEqualToRect(self.targetingFaceBounds, CGRectZero) ||
!CGRectContainsPoint(self.targetingFaceBounds,
newPoint)) { // Return YES if the new point is out of the current face bounds.
shouldChange = YES;
} else {
CGFloat currentBoundsArea =
CGRectGetWidth(self.targetingFaceBounds) * CGRectGetHeight(self.targetingFaceBounds);
CGFloat newBoundsArea = CGRectGetWidth(newFaceBounds) * CGRectGetHeight(newFaceBounds);
if (newBoundsArea >= currentBoundsArea * 1.2 ||
newBoundsArea <=
currentBoundsArea *
0.8) { // Return YES if the area of the new bounds is over 20% more or 20% less than the current one.
shouldChange = YES;
}
}
return shouldChange;
}
- (NSNumber *)_getFaceIDOfFaceBoundsContainingPoint:(CGPoint)point
fromFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SC_GUARD_ELSE_RETURN_VALUE(faceBoundsByFaceID.count > 0, nil);
__block NSNumber *faceID = nil;
[faceBoundsByFaceID
enumerateKeysAndObjectsUsingBlock:^(NSNumber *_Nonnull key, NSValue *_Nonnull obj, BOOL *_Nonnull stop) {
CGRect faceBounds = [obj CGRectValue];
if (CGRectContainsPoint(faceBounds, point)) {
faceID = key;
*stop = YES;
}
}];
return faceID;
}
@end

View File

@@ -0,0 +1,80 @@
//
// SCManagedCapturePreviewLayerController.h
// Snapchat
//
// Created by Liu Liu on 5/5/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//
#import <SCCameraFoundation/SCManagedVideoDataSource.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCGhostToSnappable/SCGhostToSnappableSignal.h>
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <Metal/Metal.h>
#import <UIKit/UIKit.h>
@protocol SCCapturer;
@class LSAGLView, SCBlackCameraDetector, SCManagedCapturePreviewLayerController;
@protocol SCManagedCapturePreviewLayerControllerDelegate
- (SCBlackCameraDetector *)blackCameraDetectorForManagedCapturePreviewLayerController:
(SCManagedCapturePreviewLayerController *)controller;
- (sc_create_g2s_ticket_f)g2sTicketForManagedCapturePreviewLayerController:
(SCManagedCapturePreviewLayerController *)controller;
@end
/**
* SCManagedCapturePreviewLayerController controls the display of frames in a view. The controller has 3
* different methods for this.
* AVCaptureVideoPreviewLayer: This is a feed coming straight from the camera and does not allow any
* image processing or modification of the frames displayed.
* LSAGLView: OpenGL based video for displaying video that is being processed (Lenses etc.)
* CAMetalLayer: Metal layer drawing textures on a vertex quad for display on screen.
*/
@interface SCManagedCapturePreviewLayerController : NSObject <SCManagedSampleBufferDisplayController>
@property (nonatomic, strong, readonly) UIView *view;
@property (nonatomic, strong, readonly) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@property (nonatomic, strong, readonly) LSAGLView *videoPreviewGLView;
@property (nonatomic, weak) id<SCManagedCapturePreviewLayerControllerDelegate> delegate;
+ (instancetype)sharedInstance;
- (void)pause;
- (void)resume;
- (UIView *)newStandInViewWithRect:(CGRect)rect;
- (void)setManagedCapturer:(id<SCCapturer>)managedCapturer;
// This method returns a token that you can hold on to. As long as the token is held,
// the outdated view will be kept, unless the app is backgrounded.
- (NSString *)keepDisplayingOutdatedPreview;
// End displaying the outdated frame with a previously issued keep token. If no one holds
// any token anymore, the outdated view will be flushed.
- (void)endDisplayingOutdatedPreview:(NSString *)keepToken;
// Create views for Metal; this method needs to be called on the main thread.
- (void)setupPreviewLayer;
// Create the render pipeline state and set up shaders for Metal; this needs to be called off the main thread.
- (void)setupRenderPipeline;
- (void)applicationDidEnterBackground;
- (void)applicationWillEnterForeground;
- (void)applicationWillResignActive;
- (void)applicationDidBecomeActive;
@end
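// A minimal setup sketch (hypothetical ordering, derived from the comments above): the Metal layer is created
// on the main thread, the render pipeline off the main thread, and the controller is then wired to the capturer.
// `appDelegateConformingToTheProtocol` is an assumed object:
//
//   SCManagedCapturePreviewLayerController *controller = [SCManagedCapturePreviewLayerController sharedInstance];
//   controller.delegate = appDelegateConformingToTheProtocol;
//   [controller setupPreviewLayer]; // main thread
//   dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
//       [controller setupRenderPipeline]; // off the main thread
//   });
//   [controller setManagedCapturer:[SCManagedCapturer sharedInstance]];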

View File

@@ -0,0 +1,563 @@
//
// SCManagedCapturePreviewLayerController.m
// Snapchat
//
// Created by Liu Liu on 5/5/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCapturePreviewLayerController.h"
#import "SCBlackCameraDetector.h"
#import "SCCameraTweaks.h"
#import "SCManagedCapturePreviewView.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerListener.h"
#import "SCManagedCapturerUtils.h"
#import "SCMetalUtils.h"
#import <SCFoundation/NSData+Random.h>
#import <SCFoundation/SCCoreGraphicsUtils.h>
#import <SCFoundation/SCDeviceName.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/SCTraceODPCompatible.h>
#import <SCFoundation/UIScreen+SCSafeAreaInsets.h>
#import <SCGhostToSnappable/SCGhostToSnappableSignal.h>
#import <FBKVOController/FBKVOController.h>
#define SCLogPreviewLayerInfo(fmt, ...) SCLogCoreCameraInfo(@"[PreviewLayerController] " fmt, ##__VA_ARGS__)
#define SCLogPreviewLayerWarning(fmt, ...) SCLogCoreCameraWarning(@"[PreviewLayerController] " fmt, ##__VA_ARGS__)
#define SCLogPreviewLayerError(fmt, ...) SCLogCoreCameraError(@"[PreviewLayerController] " fmt, ##__VA_ARGS__)
const static CGSize kSCManagedCapturePreviewDefaultRenderSize = {
.width = 720, .height = 1280,
};
const static CGSize kSCManagedCapturePreviewRenderSize1080p = {
.width = 1080, .height = 1920,
};
#if !TARGET_IPHONE_SIMULATOR
static NSInteger const kSCMetalCannotAcquireDrawableLimit = 2;
@interface CAMetalLayer (SCSecretFature)
// Call discardContents.
- (void)sc_secretFeature;
@end
@implementation CAMetalLayer (SCSecretFature)
- (void)sc_secretFeature
{
// "discardContents"
char buffer[] = {0x9b, 0x96, 0x8c, 0x9c, 0x9e, 0x8d, 0x9b, 0xbc, 0x90, 0x91, 0x8b, 0x9a, 0x91, 0x8b, 0x8c, 0};
unsigned long len = strlen(buffer);
for (unsigned idx = 0; idx < len; ++idx) {
buffer[idx] = ~buffer[idx];
}
SEL selector = NSSelectorFromString([NSString stringWithUTF8String:buffer]);
if ([self respondsToSelector:selector]) {
NSMethodSignature *signature = [self methodSignatureForSelector:selector];
NSInvocation *invocation = [NSInvocation invocationWithMethodSignature:signature];
[invocation setTarget:self];
[invocation setSelector:selector];
[invocation invoke];
}
// For anyone curious, here is the actual implementation for discardContents in 10.3 (With Hopper v4, arm64)
// At a glance, this seems pretty safe to call.
// void -[CAMetalLayer(CAMetalLayerPrivate) discardContents](int arg0)
// {
// *(r31 + 0xffffffffffffffe0) = r20;
// *(0xfffffffffffffff0 + r31) = r19;
// r31 = r31 + 0xffffffffffffffe0;
// *(r31 + 0x10) = r29;
// *(0x20 + r31) = r30;
// r29 = r31 + 0x10;
// r19 = *(arg0 + sign_extend_64(*(int32_t *)0x1a6300510));
// if (r19 != 0x0) {
// r0 = loc_1807079dc(*0x1a7811fc8, r19);
// r0 = _CAImageQueueConsumeUnconsumed(*(r19 + 0x10));
// r0 = _CAImageQueueFlush(*(r19 + 0x10));
// r29 = *(r31 + 0x10);
// r30 = *(0x20 + r31);
// r20 = *r31;
// r19 = *(r31 + 0x10);
// r31 = r31 + 0x20;
// r0 = loc_1807079dc(*0x1a7811fc8, zero_extend_64(0x0));
// } else {
// r29 = *(r31 + 0x10);
// r30 = *(0x20 + r31);
// r20 = *r31;
// r19 = *(r31 + 0x10);
// r31 = r31 + 0x20;
// }
// return;
// }
}
@end
#endif
@interface SCManagedCapturePreviewLayerController () <SCManagedCapturerListener>
@property (nonatomic) BOOL renderSuspended;
@end
@implementation SCManagedCapturePreviewLayerController {
SCManagedCapturePreviewView *_view;
CGSize _drawableSize;
SCQueuePerformer *_performer;
FBKVOController *_renderingKVO;
#if !TARGET_IPHONE_SIMULATOR
CAMetalLayer *_metalLayer;
id<MTLCommandQueue> _commandQueue;
id<MTLRenderPipelineState> _renderPipelineState;
CVMetalTextureCacheRef _textureCache;
dispatch_semaphore_t _commandBufferSemaphore;
// If the current view contains an outdated display (or any display)
BOOL _containOutdatedPreview;
// If we called empty outdated display already, but for some reason, hasn't emptied it yet.
BOOL _requireToFlushOutdatedPreview;
NSMutableSet *_tokenSet;
NSUInteger _cannotAcquireDrawable;
#endif
}
+ (instancetype)sharedInstance
{
static dispatch_once_t onceToken;
static SCManagedCapturePreviewLayerController *managedCapturePreviewLayerController;
dispatch_once(&onceToken, ^{
managedCapturePreviewLayerController = [[SCManagedCapturePreviewLayerController alloc] init];
});
return managedCapturePreviewLayerController;
}
- (instancetype)init
{
self = [super init];
if (self) {
#if !TARGET_IPHONE_SIMULATOR
// We only allow one render at a time (sorry, no double / triple buffering).
// It has to be created early here; otherwise the integrity of other parts of the code is not
// guaranteed.
// TODO: I need to reason more about the initialization sequence.
_commandBufferSemaphore = dispatch_semaphore_create(1);
// Set _renderSuspended to YES so that we won't render until everything is fully set up.
_renderSuspended = YES;
_tokenSet = [NSMutableSet set];
#endif
// If the screen is smaller than the default render size, we should fall back.
CGFloat nativeScale = [UIScreen mainScreen].nativeScale;
CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size;
CGSize renderSize = [SCDeviceName isIphoneX] ? kSCManagedCapturePreviewRenderSize1080p
: kSCManagedCapturePreviewDefaultRenderSize;
if (screenSize.width * nativeScale < renderSize.width) {
_drawableSize = CGSizeMake(screenSize.width * nativeScale, screenSize.height * nativeScale);
} else {
_drawableSize = SCSizeIntegral(
SCSizeCropToAspectRatio(renderSize, SCSizeGetAspectRatio(SCManagedCapturerAllScreenSize())));
}
_performer = [[SCQueuePerformer alloc] initWithLabel:"SCManagedCapturePreviewLayerController"
qualityOfService:QOS_CLASS_USER_INITIATED
queueType:DISPATCH_QUEUE_SERIAL
context:SCQueuePerformerContextCoreCamera];
_renderingKVO = [[FBKVOController alloc] initWithObserver:self];
[_renderingKVO observe:self
keyPath:@keypath(self, renderSuspended)
options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
block:^(id observer, id object, NSDictionary *change) {
BOOL oldValue = [change[NSKeyValueChangeOldKey] boolValue];
BOOL newValue = [change[NSKeyValueChangeNewKey] boolValue];
if (oldValue != newValue) {
[[_delegate blackCameraDetectorForManagedCapturePreviewLayerController:self]
capturePreviewDidBecomeVisible:!newValue];
}
}];
}
return self;
}
- (void)pause
{
#if !TARGET_IPHONE_SIMULATOR
SCTraceStart();
SCLogPreviewLayerInfo(@"pause Metal rendering performer waiting");
[_performer performAndWait:^() {
self.renderSuspended = YES;
}];
SCLogPreviewLayerInfo(@"pause Metal rendering performer finished");
#endif
}
- (void)resume
{
#if !TARGET_IPHONE_SIMULATOR
SCTraceStart();
SCLogPreviewLayerInfo(@"resume Metal rendering performer waiting");
[_performer performAndWait:^() {
self.renderSuspended = NO;
}];
SCLogPreviewLayerInfo(@"resume Metal rendering performer finished");
#endif
}
- (void)setupPreviewLayer
{
#if !TARGET_IPHONE_SIMULATOR
SCTraceStart();
SCAssertMainThread();
SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());
if (!_metalLayer) {
_metalLayer = [CAMetalLayer new];
SCLogPreviewLayerInfo(@"setup metalLayer:%@", _metalLayer);
if (!_view) {
// Create capture preview view and setup the metal layer
[self view];
} else {
[_view setupMetalLayer:_metalLayer];
}
}
#endif
}
- (UIView *)newStandInViewWithRect:(CGRect)rect
{
return [self.view resizableSnapshotViewFromRect:rect afterScreenUpdates:YES withCapInsets:UIEdgeInsetsZero];
}
- (void)setupRenderPipeline
{
#if !TARGET_IPHONE_SIMULATOR
SCTraceStart();
SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());
SCAssertNotMainThread();
id<MTLDevice> device = SCGetManagedCaptureMetalDevice();
id<MTLLibrary> shaderLibrary = [device newDefaultLibrary];
_commandQueue = [device newCommandQueue];
MTLRenderPipelineDescriptor *renderPipelineDescriptor = [MTLRenderPipelineDescriptor new];
renderPipelineDescriptor.colorAttachments[0].pixelFormat = MTLPixelFormatBGRA8Unorm;
renderPipelineDescriptor.vertexFunction = [shaderLibrary newFunctionWithName:@"yuv_vertex_reshape"];
renderPipelineDescriptor.fragmentFunction = [shaderLibrary newFunctionWithName:@"yuv_fragment_texture"];
MTLVertexDescriptor *vertexDescriptor = [MTLVertexDescriptor vertexDescriptor];
vertexDescriptor.attributes[0].format = MTLVertexFormatFloat2; // position
vertexDescriptor.attributes[0].offset = 0;
vertexDescriptor.attributes[0].bufferIndex = 0;
vertexDescriptor.attributes[1].format = MTLVertexFormatFloat2; // texCoords
vertexDescriptor.attributes[1].offset = 2 * sizeof(float);
vertexDescriptor.attributes[1].bufferIndex = 0;
vertexDescriptor.layouts[0].stepRate = 1;
vertexDescriptor.layouts[0].stepFunction = MTLVertexStepFunctionPerVertex;
vertexDescriptor.layouts[0].stride = 4 * sizeof(float);
renderPipelineDescriptor.vertexDescriptor = vertexDescriptor;
_renderPipelineState = [device newRenderPipelineStateWithDescriptor:renderPipelineDescriptor error:nil];
CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &_textureCache);
_metalLayer.device = device;
_metalLayer.drawableSize = _drawableSize;
_metalLayer.pixelFormat = MTLPixelFormatBGRA8Unorm;
_metalLayer.framebufferOnly = YES; // It is default to Yes.
[_performer performAndWait:^() {
self.renderSuspended = NO;
}];
SCLogPreviewLayerInfo(@"did setup render pipeline");
#endif
}
- (UIView *)view
{
SCTraceStart();
SCAssertMainThread();
if (!_view) {
#if TARGET_IPHONE_SIMULATOR
_view = [[SCManagedCapturePreviewView alloc] initWithFrame:[UIScreen mainScreen].fixedCoordinateSpace.bounds
aspectRatio:SCSizeGetAspectRatio(_drawableSize)
metalLayer:nil];
#else
_view = [[SCManagedCapturePreviewView alloc] initWithFrame:[UIScreen mainScreen].fixedCoordinateSpace.bounds
aspectRatio:SCSizeGetAspectRatio(_drawableSize)
metalLayer:_metalLayer];
SCLogPreviewLayerInfo(@"created SCManagedCapturePreviewView:%@", _view);
#endif
}
return _view;
}
- (void)setManagedCapturer:(id<SCCapturer>)managedCapturer
{
SCTraceStart();
SCLogPreviewLayerInfo(@"setManagedCapturer:%@", managedCapturer);
if (SCDeviceSupportsMetal()) {
[managedCapturer addSampleBufferDisplayController:self context:SCCapturerContext];
}
[managedCapturer addListener:self];
}
- (void)applicationDidEnterBackground
{
#if !TARGET_IPHONE_SIMULATOR
SCTraceStart();
SCAssertMainThread();
SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());
SCLogPreviewLayerInfo(@"applicationDidEnterBackground waiting for performer");
[_performer performAndWait:^() {
CVMetalTextureCacheFlush(_textureCache, 0);
[_tokenSet removeAllObjects];
self.renderSuspended = YES;
}];
SCLogPreviewLayerInfo(@"applicationDidEnterBackground signal performer finishes");
#endif
}
- (void)applicationWillResignActive
{
SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());
SCTraceStart();
SCAssertMainThread();
#if !TARGET_IPHONE_SIMULATOR
SCLogPreviewLayerInfo(@"pause Metal rendering");
[_performer performAndWait:^() {
self.renderSuspended = YES;
}];
#endif
}
- (void)applicationDidBecomeActive
{
SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());
SCTraceStart();
SCAssertMainThread();
#if !TARGET_IPHONE_SIMULATOR
SCLogPreviewLayerInfo(@"resume Metal rendering waiting for performer");
[_performer performAndWait:^() {
self.renderSuspended = NO;
}];
SCLogPreviewLayerInfo(@"resume Metal rendering performer finished");
#endif
}
- (void)applicationWillEnterForeground
{
#if !TARGET_IPHONE_SIMULATOR
SCTraceStart();
SCAssertMainThread();
SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());
SCLogPreviewLayerInfo(@"applicationWillEnterForeground waiting for performer");
[_performer performAndWait:^() {
self.renderSuspended = NO;
if (_containOutdatedPreview && _tokenSet.count == 0) {
[self _flushOutdatedPreview];
}
}];
SCLogPreviewLayerInfo(@"applicationWillEnterForeground performer finished");
#endif
}
- (NSString *)keepDisplayingOutdatedPreview
{
SCTraceStart();
NSString *token = [NSData randomBase64EncodedStringOfLength:8];
#if !TARGET_IPHONE_SIMULATOR
SCLogPreviewLayerInfo(@"keepDisplayingOutdatedPreview waiting for performer");
[_performer performAndWait:^() {
[_tokenSet addObject:token];
}];
SCLogPreviewLayerInfo(@"keepDisplayingOutdatedPreview performer finished");
#endif
return token;
}
- (void)endDisplayingOutdatedPreview:(NSString *)keepToken
{
#if !TARGET_IPHONE_SIMULATOR
SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());
// I simply use a lock for this. If it becomes a bottleneck, I can figure something else out.
SCTraceStart();
SCLogPreviewLayerInfo(@"endDisplayingOutdatedPreview waiting for performer");
[_performer performAndWait:^() {
[_tokenSet removeObject:keepToken];
if (_tokenSet.count == 0 && _requireToFlushOutdatedPreview && _containOutdatedPreview && !_renderSuspended) {
[self _flushOutdatedPreview];
}
}];
SCLogPreviewLayerInfo(@"endDisplayingOutdatedPreview performer finished");
#endif
}
#pragma mark - SCManagedSampleBufferDisplayController
- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
#if !TARGET_IPHONE_SIMULATOR
// Just drop the frame if it is rendering.
SC_GUARD_ELSE_RUN_AND_RETURN_VALUE(dispatch_semaphore_wait(_commandBufferSemaphore, DISPATCH_TIME_NOW) == 0,
SCLogPreviewLayerInfo(@"waiting for commandBufferSemaphore signaled"), );
// Just drop the frame, simple.
[_performer performAndWait:^() {
if (_renderSuspended) {
SCLogGeneralInfo(@"Preview rendering suspends and current sample buffer is dropped");
dispatch_semaphore_signal(_commandBufferSemaphore);
return;
}
@autoreleasepool {
const BOOL isFirstPreviewFrame = !_containOutdatedPreview;
if (isFirstPreviewFrame) {
// Signal that we received the first frame (otherwise this will be YES already).
SCGhostToSnappableSignalDidReceiveFirstPreviewFrame();
sc_create_g2s_ticket_f func = [_delegate g2sTicketForManagedCapturePreviewLayerController:self];
SCG2SActivateManiphestTicketQueueWithTicketCreationFunction(func);
}
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
size_t pixelWidth = CVPixelBufferGetWidth(imageBuffer);
size_t pixelHeight = CVPixelBufferGetHeight(imageBuffer);
id<MTLTexture> yTexture =
SCMetalTextureFromPixelBuffer(imageBuffer, 0, MTLPixelFormatR8Unorm, _textureCache);
id<MTLTexture> cbCrTexture =
SCMetalTextureFromPixelBuffer(imageBuffer, 1, MTLPixelFormatRG8Unorm, _textureCache);
CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
SC_GUARD_ELSE_RUN_AND_RETURN(yTexture && cbCrTexture, dispatch_semaphore_signal(_commandBufferSemaphore));
id<MTLCommandBuffer> commandBuffer = _commandQueue.commandBuffer;
id<CAMetalDrawable> drawable = _metalLayer.nextDrawable;
if (!drawable) {
// Count how many times I cannot acquire drawable.
++_cannotAcquireDrawable;
if (_cannotAcquireDrawable >= kSCMetalCannotAcquireDrawableLimit) {
// Calling [_metalLayer discardContents] to flush the CAImageQueue
SCLogGeneralInfo(@"Cannot acquire drawable, reboot Metal ..");
[_metalLayer sc_secretFeature];
}
dispatch_semaphore_signal(_commandBufferSemaphore);
return;
}
_cannotAcquireDrawable = 0; // Reset to 0 in case we can acquire drawable.
MTLRenderPassDescriptor *renderPassDescriptor = [MTLRenderPassDescriptor new];
renderPassDescriptor.colorAttachments[0].texture = drawable.texture;
id<MTLRenderCommandEncoder> renderEncoder =
[commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor];
[renderEncoder setRenderPipelineState:_renderPipelineState];
[renderEncoder setFragmentTexture:yTexture atIndex:0];
[renderEncoder setFragmentTexture:cbCrTexture atIndex:1];
// TODO: Prob this out of the image buffer.
// 90-degree clockwise rotated texture coordinates.
// Also do aspect fill.
float normalizedHeight, normalizedWidth;
if (pixelWidth * _drawableSize.width > _drawableSize.height * pixelHeight) {
normalizedHeight = 1.0;
normalizedWidth = pixelWidth * (_drawableSize.width / pixelHeight) / _drawableSize.height;
} else {
normalizedHeight = pixelHeight * (_drawableSize.height / pixelWidth) / _drawableSize.width;
normalizedWidth = 1.0;
}
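// Illustrative reading of the math above (not an authoritative spec): because the buffer is drawn rotated
// 90 degrees, the branch scales the quad so that one normalized half-extent is exactly 1 and the other is >= 1.
// The quad therefore overfills the drawable and the excess is cropped, which is aspect fill in normalized
// device coordinates; the rotation itself comes from the texture coordinates in the vertex array below.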
const float vertices[] = {
-normalizedHeight, -normalizedWidth, 1, 1, // lower left -> upper right
normalizedHeight, -normalizedWidth, 1, 0, // lower right -> lower right
-normalizedHeight, normalizedWidth, 0, 1, // upper left -> upper left
normalizedHeight, normalizedWidth, 0, 0, // upper right -> lower left
};
[renderEncoder setVertexBytes:vertices length:sizeof(vertices) atIndex:0];
[renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip vertexStart:0 vertexCount:4];
[renderEncoder endEncoding];
// I need to set a minimum duration for the drawable.
// There is a bug on iOS 10.3: if I present as soon as I can, I keep the GPU
// at 30fps even when you swipe between views, which causes undesirable visual jarring.
// By setting a minimum duration, even an incredibly small one (I tried 10ms, and 60fps works here),
// the OS seems to adjust the frame rate much better when swiping.
// presentDrawable:afterMinimumDuration: is a new method in iOS 10.3.
if ([commandBuffer respondsToSelector:@selector(presentDrawable:afterMinimumDuration:)]) {
[(id)commandBuffer presentDrawable:drawable afterMinimumDuration:(1.0 / 60)];
} else {
[commandBuffer presentDrawable:drawable];
}
[commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> commandBuffer) {
dispatch_semaphore_signal(_commandBufferSemaphore);
}];
if (isFirstPreviewFrame) {
if ([drawable respondsToSelector:@selector(addPresentedHandler:)] &&
[drawable respondsToSelector:@selector(presentedTime)]) {
[(id)drawable addPresentedHandler:^(id<MTLDrawable> presentedDrawable) {
SCGhostToSnappableSignalDidRenderFirstPreviewFrame([(id)presentedDrawable presentedTime]);
}];
} else {
[commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> commandBuffer) {
// Using CACurrentMediaTime to approximate.
SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime());
}];
}
}
// We enqueued a sample buffer to display; therefore, the view now contains an outdated display (to be cleaned up).
_containOutdatedPreview = YES;
[commandBuffer commit];
}
}];
#endif
}
- (void)flushOutdatedPreview
{
SCTraceStart();
#if !TARGET_IPHONE_SIMULATOR
// This method cannot drop frames (otherwise we will have residual on the screen).
SCLogPreviewLayerInfo(@"flushOutdatedPreview waiting for performer");
[_performer performAndWait:^() {
_requireToFlushOutdatedPreview = YES;
SC_GUARD_ELSE_RETURN(!_renderSuspended);
// Have to make sure we have no token left before return.
SC_GUARD_ELSE_RETURN(_tokenSet.count == 0);
[self _flushOutdatedPreview];
}];
SCLogPreviewLayerInfo(@"flushOutdatedPreview performer finished");
#endif
}
- (void)_flushOutdatedPreview
{
SCTraceStart();
SCAssertPerformer(_performer);
#if !TARGET_IPHONE_SIMULATOR
SCLogPreviewLayerInfo(@"flushOutdatedPreview containOutdatedPreview:%d", _containOutdatedPreview);
// I don't care if this has renderSuspended or not, assuming I did the right thing.
// Once emptied, there is no need to do this again on foregrounding.
SC_GUARD_ELSE_RETURN(_containOutdatedPreview);
_containOutdatedPreview = NO;
_requireToFlushOutdatedPreview = NO;
[_metalLayer sc_secretFeature];
#endif
}
#pragma mark - SCManagedCapturerListener
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer
{
SCTraceStart();
SCAssertMainThread();
// Force to load the view
[self view];
_view.videoPreviewLayer = videoPreviewLayer;
SCLogPreviewLayerInfo(@"didChangeVideoPreviewLayer:%@", videoPreviewLayer);
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeVideoPreviewGLView:(LSAGLView *)videoPreviewGLView
{
SCTraceStart();
SCAssertMainThread();
// Force to load the view
[self view];
_view.videoPreviewGLView = videoPreviewGLView;
SCLogPreviewLayerInfo(@"didChangeVideoPreviewGLView:%@", videoPreviewGLView);
}
@end

View File

@@ -0,0 +1,25 @@
//
// SCManagedCapturePreviewView.h
// Snapchat
//
// Created by Liu Liu on 5/5/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
@class LSAGLView;
@interface SCManagedCapturePreviewView : UIView
- (instancetype)initWithFrame:(CGRect)frame NS_UNAVAILABLE;
- (instancetype)initWithFrame:(CGRect)frame aspectRatio:(CGFloat)aspectRatio metalLayer:(CALayer *)metalLayer;
// This method is called only once, in case the metalLayer was previously nil.
- (void)setupMetalLayer:(CALayer *)metalLayer;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@property (nonatomic, strong) LSAGLView *videoPreviewGLView;
@end

View File

@@ -0,0 +1,173 @@
//
// SCManagedCapturePreviewView.m
// Snapchat
//
// Created by Liu Liu on 5/5/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCapturePreviewView.h"
#import "SCCameraTweaks.h"
#import "SCManagedCapturePreviewLayerController.h"
#import "SCManagedCapturePreviewViewDebugView.h"
#import "SCMetalUtils.h"
#import <SCFoundation/SCCoreGraphicsUtils.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCTrace.h>
#import <Looksery/LSAGLView.h>
@implementation SCManagedCapturePreviewView {
CGFloat _aspectRatio;
CALayer *_containerLayer;
CALayer *_metalLayer;
SCManagedCapturePreviewViewDebugView *_debugView;
}
- (instancetype)initWithFrame:(CGRect)frame aspectRatio:(CGFloat)aspectRatio metalLayer:(CALayer *)metalLayer
{
SCTraceStart();
SCAssertMainThread();
self = [super initWithFrame:frame];
if (self) {
_aspectRatio = aspectRatio;
if (SCDeviceSupportsMetal()) {
[CATransaction begin];
[CATransaction setDisableActions:YES];
_metalLayer = metalLayer;
_metalLayer.frame = [self _layerFrame];
[self.layer insertSublayer:_metalLayer below:[self.layer sublayers][0]];
[CATransaction commit];
} else {
_containerLayer = [[CALayer alloc] init];
_containerLayer.frame = [self _layerFrame];
// Use a container layer so that the software zooming happens on this layer.
[self.layer insertSublayer:_containerLayer below:[self.layer sublayers][0]];
}
if ([self _shouldShowDebugView]) {
_debugView = [[SCManagedCapturePreviewViewDebugView alloc] init];
[self addSubview:_debugView];
}
}
return self;
}
- (void)_layoutVideoPreviewLayer
{
SCAssertMainThread();
[CATransaction begin];
[CATransaction setDisableActions:YES];
if (SCDeviceSupportsMetal()) {
_metalLayer.frame = [self _layerFrame];
} else {
if (_videoPreviewLayer) {
SCLogGeneralInfo(@"container layer frame %@, video preview layer frame %@",
NSStringFromCGRect(_containerLayer.frame), NSStringFromCGRect(_videoPreviewLayer.frame));
}
// Using bounds because we don't really care about the position at this point.
_containerLayer.frame = [self _layerFrame];
_videoPreviewLayer.frame = _containerLayer.bounds;
_videoPreviewLayer.position =
CGPointMake(CGRectGetWidth(_containerLayer.bounds) * 0.5, CGRectGetHeight(_containerLayer.bounds) * 0.5);
}
[CATransaction commit];
}
- (void)_layoutVideoPreviewGLView
{
SCCAssertMainThread();
_videoPreviewGLView.frame = [self _layerFrame];
}
- (CGRect)_layerFrame
{
CGRect frame = SCRectMakeWithCenterAndSize(
SCRectGetMid(self.bounds), SCSizeIntegral(SCSizeExpandToAspectRatio(self.bounds.size, _aspectRatio)));
CGFloat x = frame.origin.x;
x = isnan(x) ? 0.0 : (isfinite(x) ? x : INFINITY);
CGFloat y = frame.origin.y;
y = isnan(y) ? 0.0 : (isfinite(y) ? y : INFINITY);
CGFloat width = frame.size.width;
width = isnan(width) ? 0.0 : (isfinite(width) ? width : INFINITY);
CGFloat height = frame.size.height;
height = isnan(height) ? 0.0 : (isfinite(height) ? height : INFINITY);
return CGRectMake(x, y, width, height);
}
- (void)setVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer
{
SCAssertMainThread();
if (_videoPreviewLayer != videoPreviewLayer) {
[_videoPreviewLayer removeFromSuperlayer];
_videoPreviewLayer = videoPreviewLayer;
[_containerLayer addSublayer:_videoPreviewLayer];
[self _layoutVideoPreviewLayer];
}
}
- (void)setupMetalLayer:(CALayer *)metalLayer
{
SCAssert(!_metalLayer, @"_metalLayer should be nil.");
SCAssert(metalLayer, @"metalLayer must exist.");
SCAssertMainThread();
_metalLayer = metalLayer;
[self.layer insertSublayer:_metalLayer below:[self.layer sublayers][0]];
[self _layoutVideoPreviewLayer];
}
- (void)setVideoPreviewGLView:(LSAGLView *)videoPreviewGLView
{
SCAssertMainThread();
if (_videoPreviewGLView != videoPreviewGLView) {
[_videoPreviewGLView removeFromSuperview];
_videoPreviewGLView = videoPreviewGLView;
[self addSubview:_videoPreviewGLView];
[self _layoutVideoPreviewGLView];
}
}
#pragma mark - Overridden methods
- (void)layoutSubviews
{
SCAssertMainThread();
[super layoutSubviews];
[self _layoutVideoPreviewLayer];
[self _layoutVideoPreviewGLView];
[self _layoutDebugViewIfNeeded];
}
- (void)setHidden:(BOOL)hidden
{
SCAssertMainThread();
[super setHidden:hidden];
if (hidden) {
SCLogGeneralInfo(@"[SCManagedCapturePreviewView] - isHidden is being set to YES");
}
}
#pragma mark - Debug View
- (BOOL)_shouldShowDebugView
{
// Only show the debug view in internal builds when the focus/exposure point observation tweaks are turned on.
return SCIsInternalBuild() &&
(SCCameraTweaksEnableFocusPointObservation() || SCCameraTweaksEnableExposurePointObservation());
}
- (void)_layoutDebugViewIfNeeded
{
SCAssertMainThread();
SC_GUARD_ELSE_RETURN([self _shouldShowDebugView]);
_debugView.frame = self.bounds;
[self bringSubviewToFront:_debugView];
}
@end

View File

@ -0,0 +1,14 @@
//
// SCManagedCapturePreviewViewDebugView.h
// Snapchat
//
// Created by Jiyang Zhu on 1/19/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
@interface SCManagedCapturePreviewViewDebugView : UIView
@end

View File

@ -0,0 +1,204 @@
//
// SCManagedCapturePreviewViewDebugView.m
// Snapchat
//
// Created by Jiyang Zhu on 1/19/18.
// Copyright © 2018 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCapturePreviewViewDebugView.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerListener.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/UIFont+AvenirNext.h>
@import CoreText;
static CGFloat const kSCManagedCapturePreviewViewDebugViewCrossHairLineWidth = 1.0;
static CGFloat const kSCManagedCapturePreviewViewDebugViewCrossHairWidth = 20.0;
@interface SCManagedCapturePreviewViewDebugView () <SCManagedCapturerListener>
@property (assign, nonatomic) CGPoint focusPoint;
@property (assign, nonatomic) CGPoint exposurePoint;
@property (strong, nonatomic) NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID;
@end
@implementation SCManagedCapturePreviewViewDebugView
- (instancetype)initWithFrame:(CGRect)frame
{
self = [super initWithFrame:frame];
if (self) {
self.userInteractionEnabled = NO;
self.backgroundColor = [UIColor clearColor];
_focusPoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)];
_exposurePoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)];
[[SCManagedCapturer sharedInstance] addListener:self];
}
return self;
}
- (void)drawRect:(CGRect)rect
{
CGContextRef context = UIGraphicsGetCurrentContext();
if (self.focusPoint.x > 0 || self.focusPoint.y > 0) {
[self _drawCrossHairAtPoint:self.focusPoint inContext:context withColor:[UIColor greenColor] isXShaped:YES];
}
if (self.exposurePoint.x > 0 || self.exposurePoint.y > 0) {
[self _drawCrossHairAtPoint:self.exposurePoint inContext:context withColor:[UIColor yellowColor] isXShaped:NO];
}
if (self.faceBoundsByFaceID.count > 0) {
[self.faceBoundsByFaceID
enumerateKeysAndObjectsUsingBlock:^(NSNumber *_Nonnull key, NSValue *_Nonnull obj, BOOL *_Nonnull stop) {
CGRect faceRect = [obj CGRectValue];
NSInteger faceID = [key integerValue];
[self _drawRectangle:faceRect
text:[NSString sc_stringWithFormat:@"ID: %@", key]
inContext:context
withColor:[UIColor colorWithRed:((faceID % 3) == 0)
green:((faceID % 3) == 1)
blue:((faceID % 3) == 2)
alpha:1.0]];
}];
}
}
- (void)dealloc
{
[[SCManagedCapturer sharedInstance] removeListener:self];
}
/**
Draw a crosshair with center point, context, color and shape.
@param isXShaped "X" or "+"
*/
- (void)_drawCrossHairAtPoint:(CGPoint)center
inContext:(CGContextRef)context
withColor:(UIColor *)color
isXShaped:(BOOL)isXShaped
{
CGFloat width = kSCManagedCapturePreviewViewDebugViewCrossHairWidth;
CGContextSetStrokeColorWithColor(context, color.CGColor);
CGContextSetLineWidth(context, kSCManagedCapturePreviewViewDebugViewCrossHairLineWidth);
CGContextBeginPath(context);
if (isXShaped) {
CGContextMoveToPoint(context, center.x - width / 2, center.y - width / 2);
CGContextAddLineToPoint(context, center.x + width / 2, center.y + width / 2);
CGContextMoveToPoint(context, center.x + width / 2, center.y - width / 2);
CGContextAddLineToPoint(context, center.x - width / 2, center.y + width / 2);
} else {
CGContextMoveToPoint(context, center.x - width / 2, center.y);
CGContextAddLineToPoint(context, center.x + width / 2, center.y);
CGContextMoveToPoint(context, center.x, center.y - width / 2);
CGContextAddLineToPoint(context, center.x, center.y + width / 2);
}
CGContextStrokePath(context);
}
/**
Draws a rectangle, with a text label at its top left.
*/
- (void)_drawRectangle:(CGRect)rect text:(NSString *)text inContext:(CGContextRef)context withColor:(UIColor *)color
{
CGContextSetStrokeColorWithColor(context, color.CGColor);
CGContextSetLineWidth(context, kSCManagedCapturePreviewViewDebugViewCrossHairLineWidth);
CGContextBeginPath(context);
CGContextMoveToPoint(context, CGRectGetMinX(rect), CGRectGetMinY(rect));
CGContextAddLineToPoint(context, CGRectGetMinX(rect), CGRectGetMaxY(rect));
CGContextAddLineToPoint(context, CGRectGetMaxX(rect), CGRectGetMaxY(rect));
CGContextAddLineToPoint(context, CGRectGetMaxX(rect), CGRectGetMinY(rect));
CGContextAddLineToPoint(context, CGRectGetMinX(rect), CGRectGetMinY(rect));
NSMutableParagraphStyle *textStyle = [[NSMutableParagraphStyle alloc] init];
textStyle.alignment = NSTextAlignmentLeft;
NSDictionary *attributes = @{
NSFontAttributeName : [UIFont boldSystemFontOfSize:16],
NSForegroundColorAttributeName : color,
NSParagraphStyleAttributeName : textStyle
};
[text drawInRect:rect withAttributes:attributes];
CGContextStrokePath(context);
}
- (CGPoint)_convertPointOfInterest:(CGPoint)point
{
SCAssertMainThread();
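// Points of interest from the capture device are normalized in landscape sensor space; map them into this
// portrait view's coordinate space, and mirror the x coordinate below when the video is mirrored.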
CGPoint convertedPoint =
CGPointMake((1 - point.y) * CGRectGetWidth(self.bounds), point.x * CGRectGetHeight(self.bounds));
if ([[SCManagedCapturer sharedInstance] isVideoMirrored]) {
convertedPoint.x = CGRectGetWidth(self.bounds) - convertedPoint.x;
}
return convertedPoint;
}
- (NSDictionary<NSNumber *, NSValue *> *)_convertFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
SCAssertMainThread();
NSMutableDictionary<NSNumber *, NSValue *> *convertedFaceBoundsByFaceID =
[NSMutableDictionary dictionaryWithCapacity:faceBoundsByFaceID.count];
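// Face bounds are also normalized in landscape sensor space, so x/y and width/height are swapped when mapping
// into this portrait view; the x origin is flipped below when the video is not mirrored.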
for (NSNumber *key in faceBoundsByFaceID.allKeys) {
CGRect faceBounds = [[faceBoundsByFaceID objectForKey:key] CGRectValue];
CGRect convertedBounds = CGRectMake(CGRectGetMinY(faceBounds) * CGRectGetWidth(self.bounds),
CGRectGetMinX(faceBounds) * CGRectGetHeight(self.bounds),
CGRectGetHeight(faceBounds) * CGRectGetWidth(self.bounds),
CGRectGetWidth(faceBounds) * CGRectGetHeight(self.bounds));
if (![[SCManagedCapturer sharedInstance] isVideoMirrored]) {
convertedBounds.origin.x = CGRectGetWidth(self.bounds) - CGRectGetMaxX(convertedBounds);
}
[convertedFaceBoundsByFaceID setObject:[NSValue valueWithCGRect:convertedBounds] forKey:key];
}
return convertedFaceBoundsByFaceID;
}
#pragma mark - SCManagedCapturerListener
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeExposurePoint:(CGPoint)exposurePoint
{
runOnMainThreadAsynchronouslyIfNecessary(^{
self.exposurePoint = [self _convertPointOfInterest:exposurePoint];
[self setNeedsDisplay];
});
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFocusPoint:(CGPoint)focusPoint
{
runOnMainThreadAsynchronouslyIfNecessary(^{
self.focusPoint = [self _convertPointOfInterest:focusPoint];
[self setNeedsDisplay];
});
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didDetectFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
runOnMainThreadAsynchronouslyIfNecessary(^{
self.faceBoundsByFaceID = [self _convertFaceBounds:faceBoundsByFaceID];
[self setNeedsDisplay];
});
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeCaptureDevicePosition:(SCManagedCapturerState *)state
{
runOnMainThreadAsynchronouslyIfNecessary(^{
self.faceBoundsByFaceID = nil;
self.focusPoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)];
self.exposurePoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)];
[self setNeedsDisplay];
});
}
@end

View File

@ -0,0 +1,67 @@
//
// SCManagedCaptureSession.h
// Snapchat
//
// Created by Derek Wang on 02/03/2018.
//
#import <SCBase/SCMacros.h>
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
/**
`SCManagedCaptureSession` is a wrapper class around `AVCaptureSession`. Its purpose is to provide additional
functionality on top of `AVCaptureSession`.
For example, black camera detection needs to monitor when certain session methods are called. It can also serve as a
more stable version of `AVCaptureSession`: `AVCaptureSession` fixing logic can be moved into this class so that it
exposes reliable interfaces to the outside. That is the next step.
It also mimics `AVCaptureSession` by implementing some of its methods. The original methods on `AVCaptureSession`
should not be used anymore.
*/
@class SCBlackCameraDetector;
NS_ASSUME_NONNULL_BEGIN
@interface SCManagedCaptureSession : NSObject
/**
Expose avSession property
*/
@property (nonatomic, strong, readonly) AVCaptureSession *avSession;
/**
Expose avSession isRunning property for convenience.
*/
@property (nonatomic, readonly, assign) BOOL isRunning;
/**
Wraps and monitors [AVCaptureSession startRunning]. [AVCaptureSession startRunning] should not be called directly.
*/
- (void)startRunning;
/**
Wraps and monitors [AVCaptureSession stopRunning]. [AVCaptureSession stopRunning] should not be called directly.
*/
- (void)stopRunning;
/**
Wraps and monitors [AVCaptureSession beginConfiguration].
*/
- (void)beginConfiguration;
/**
Wraps and monitors [AVCaptureSession commitConfiguration].
*/
- (void)commitConfiguration;
/**
Configures the internal AVCaptureSession within a beginConfiguration/commitConfiguration pair.
@param block configuration block executed between beginConfiguration and commitConfiguration
*/
- (void)performConfiguration:(void (^)(void))block;
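// A minimal usage sketch (illustrative only; `detector` is assumed to come from the surrounding capture setup):
//
//   SCManagedCaptureSession *session = [[SCManagedCaptureSession alloc] initWithBlackCameraDetector:detector];
//   [session performConfiguration:^{
//       // mutate session.avSession inputs/outputs here; begin/commitConfiguration are handled by the wrapper
//   }];
//   [session startRunning];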
- (instancetype)initWithBlackCameraDetector:(SCBlackCameraDetector *)detector NS_DESIGNATED_INITIALIZER;
SC_INIT_AND_NEW_UNAVAILABLE
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,74 @@
//
// SCManagedCaptureSession.m
// Snapchat
//
// Created by Derek Wang on 02/03/2018.
//
#import "SCManagedCaptureSession.h"
#import "SCBlackCameraDetector.h"
#import <SCFoundation/SCTraceODPCompatible.h>
@interface SCManagedCaptureSession () {
SCBlackCameraDetector *_blackCameraDetector;
}
@end
@implementation SCManagedCaptureSession
- (instancetype)initWithBlackCameraDetector:(SCBlackCameraDetector *)detector
{
self = [super init];
if (self) {
_avSession = [[AVCaptureSession alloc] init];
_blackCameraDetector = detector;
}
return self;
}
- (void)startRunning
{
SCTraceODPCompatibleStart(2);
[_blackCameraDetector sessionWillCallStartRunning];
[_avSession startRunning];
[_blackCameraDetector sessionDidCallStartRunning];
}
- (void)stopRunning
{
SCTraceODPCompatibleStart(2);
[_blackCameraDetector sessionWillCallStopRunning];
[_avSession stopRunning];
[_blackCameraDetector sessionDidCallStopRunning];
}
- (void)performConfiguration:(nonnull void (^)(void))block
{
SC_GUARD_ELSE_RETURN(block);
[self beginConfiguration];
block();
[self commitConfiguration];
}
- (void)beginConfiguration
{
[_avSession beginConfiguration];
}
- (void)commitConfiguration
{
SCTraceODPCompatibleStart(2);
[_blackCameraDetector sessionWillCommitConfiguration];
[_avSession commitConfiguration];
[_blackCameraDetector sessionDidCommitConfiguration];
}
- (BOOL)isRunning
{
return _avSession.isRunning;
}
@end

View File

@ -0,0 +1,23 @@
// SCManagedCapturer.h
// Snapchat
//
// Created by Liu Liu on 4/20/15.
#import "SCCapturer.h"
#import "SCManagedCapturerListener.h"
#import "SCManagedCapturerUtils.h"
#import <Foundation/Foundation.h>
/*
SCManagedCapturer is a shell class. Its job is to provide a singleton instance that follows the protocol of
SCManagedCapturerImpl. We use this pattern because we are building SCManagedCapturerV2: this setup lets us
write V2 without breaking the existing app, and test the new implementation via a Tweak.
*/
@interface SCManagedCapturer : NSObject
+ (id<SCCapturer>)sharedInstance;
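// Illustrative usage: callers depend only on the SCCapturer protocol, so the concrete implementation behind
// sharedInstance (V1 or V2) can change without affecting call sites, e.g.:
//
//   id<SCCapturer> capturer = [SCManagedCapturer sharedInstance];
//   [capturer addListener:self];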
@end

View File

@ -0,0 +1,26 @@
//
// SCManagedCapturer.m
// Snapchat
//
// Created by Lin Jia on 9/28/17.
//
#import "SCManagedCapturer.h"
#import "SCCameraTweaks.h"
#import "SCCaptureCore.h"
#import "SCManagedCapturerV1.h"
@implementation SCManagedCapturer
+ (id<SCCapturer>)sharedInstance
{
static dispatch_once_t onceToken;
static id<SCCapturer> managedCapturer;
dispatch_once(&onceToken, ^{
managedCapturer = [[SCCaptureCore alloc] init];
});
return managedCapturer;
}
@end

View File

@ -0,0 +1,26 @@
//
// SCManagedCapturerARSessionHandler.h
// Snapchat
//
// Created by Xiaokang Liu on 16/03/2018.
//
// This class handles the AVCaptureSession events when an ARSession is enabled.
// -stopARSessionRunning blocks until the AVCaptureSessionDidStopRunningNotification has been received,
// after which we can restart the AVCaptureSession gracefully.
#import <SCBase/SCMacros.h>
#import <Foundation/Foundation.h>
@class SCCaptureResource;
@interface SCManagedCapturerARSessionHandler : NSObject
SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource NS_DESIGNATED_INITIALIZER;
- (void)stopObserving;
- (void)stopARSessionRunning NS_AVAILABLE_IOS(11_0);
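// Illustrative call sequence (the surrounding resource/session names are assumptions):
//
//   [handler stopARSessionRunning];          // blocks (with a timeout) until the ARSession's internal
//                                            // AVCaptureSession reports that it stopped
//   [resource.managedSession startRunning];  // now the regular capture session can be restarted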
@end

View File

@ -0,0 +1,76 @@
//
// SCManagedCapturerARSessionHandler.m
// Snapchat
//
// Created by Xiaokang Liu on 16/03/2018.
//
#import "SCManagedCapturerARSessionHandler.h"
#import "SCCaptureResource.h"
#import "SCManagedCaptureSession.h"
#import <SCBase/SCAvailability.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
@import ARKit;
static CGFloat const kSCManagedCapturerARKitShutdownTimeoutDuration = 2;
@interface SCManagedCapturerARSessionHandler () {
SCCaptureResource *__weak _captureResource;
dispatch_semaphore_t _arSesssionShutdownSemaphore;
}
@end
@implementation SCManagedCapturerARSessionHandler
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
self = [super init];
if (self) {
SCAssert(captureResource, @"");
_captureResource = captureResource;
_arSesssionShutdownSemaphore = dispatch_semaphore_create(0);
}
return self;
}
- (void)stopObserving
{
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVCaptureSessionDidStopRunningNotification
object:nil];
}
- (void)stopARSessionRunning
{
SCAssertPerformer(_captureResource.queuePerformer);
SCAssert(SC_AT_LEAST_IOS_11, @"Should only be called on iOS 11+");
if (@available(iOS 11.0, *)) {
// ARSession stops its internal AVCaptureSession asynchronously. We listen for its stop notification and wait
// until it has finished shutting down before restarting our own capture session, so the two capture sessions
// don't conflict.
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(_completeARSessionShutdown:)
name:AVCaptureSessionDidStopRunningNotification
object:nil];
[_captureResource.arSession pause];
dispatch_semaphore_wait(
_arSesssionShutdownSemaphore,
dispatch_time(DISPATCH_TIME_NOW, (int64_t)(kSCManagedCapturerARKitShutdownTimeoutDuration * NSEC_PER_SEC)));
}
}
- (void)_completeARSessionShutdown:(NSNotification *)note
{
// This notification is only registered IMMEDIATELY before the ARKit shutdown.
// Explicitly guard that the notification object is NOT the main session's.
SC_GUARD_ELSE_RETURN(![note.object isEqual:_captureResource.managedSession.avSession]);
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVCaptureSessionDidStopRunningNotification
object:nil];
dispatch_semaphore_signal(_arSesssionShutdownSemaphore);
}
@end

View File

@ -0,0 +1,135 @@
//#!announcer.rb
//
// SCManagedCapturerListener
// Snapchat
//
// Created by Liu Liu on 4/23/15.
// Copyright (c) 2015 Liu Liu. All rights reserved.
//
#import "SCCapturer.h"
#import "SCManagedCaptureDevice.h"
#import "SCManagedRecordedVideo.h"
#import "SCVideoCaptureSessionInfo.h"
#import <SCFoundation/SCFuture.h>
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
@class SCManagedCapturer;
@class SCManagedCapturerState;
@class LSAGLView;
@class SCManagedCapturerSampleMetadata;
@protocol SCManagedCapturerListener <NSObject>
@optional
// All these callbacks are invoked on the main queue
// Start / stop / reset
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didStartRunning:(SCManagedCapturerState *)state;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didStopRunning:(SCManagedCapturerState *)state;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didResetFromRuntimeError:(SCManagedCapturerState *)state;
// Change state methods
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeState:(SCManagedCapturerState *)state;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeNightModeActive:(SCManagedCapturerState *)state;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangePortraitModeActive:(SCManagedCapturerState *)state;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFlashActive:(SCManagedCapturerState *)state;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeLensesActive:(SCManagedCapturerState *)state;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeARSessionActive:(SCManagedCapturerState *)state;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didChangeFlashSupportedAndTorchSupported:(SCManagedCapturerState *)state;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeZoomFactor:(SCManagedCapturerState *)state;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeLowLightCondition:(SCManagedCapturerState *)state;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeCaptureDevicePosition:(SCManagedCapturerState *)state;
// The video preview layer is not maintained as part of the state; its change is unrelated to the state of
// the camera, and the listener should only manage the setup of the videoPreviewLayer.
// Since an AVCaptureVideoPreviewLayer can only be attached to one AVCaptureSession per app, it is recommended
// that you have a view and controller which manage the video preview layer, and that upper layers only manage
// that view or view controller, which keeps the pointer consistent. The video preview layer needs to be
// recreated every now and then, because otherwise the old video preview layer may contain residual images.
// An illustrative listener implementation is sketched after the declaration below.
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer;
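// Illustrative listener implementation (a minimal sketch; `self.previewView` is an assumed property holding an
// SCManagedCapturePreviewView):
//
//   - (void)managedCapturer:(id<SCCapturer>)managedCapturer
//       didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer
//   {
//       self.previewView.videoPreviewLayer = videoPreviewLayer; // re-attach the freshly recreated layer
//   }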
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeVideoPreviewGLView:(LSAGLView *)videoPreviewGLView;
// Video recording-related methods
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didBeginVideoRecording:(SCManagedCapturerState *)state
session:(SCVideoCaptureSessionInfo)session;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didBeginAudioRecording:(SCManagedCapturerState *)state
session:(SCVideoCaptureSessionInfo)session;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
willFinishRecording:(SCManagedCapturerState *)state
session:(SCVideoCaptureSessionInfo)session
recordedVideoFuture:(SCFuture<id<SCManagedRecordedVideo>> *)recordedVideoFuture
videoSize:(CGSize)videoSize
placeholderImage:(UIImage *)placeholderImage;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didFinishRecording:(SCManagedCapturerState *)state
session:(SCVideoCaptureSessionInfo)session
recordedVideo:(SCManagedRecordedVideo *)recordedVideo;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didFailRecording:(SCManagedCapturerState *)state
session:(SCVideoCaptureSessionInfo)session
error:(NSError *)error;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didCancelRecording:(SCManagedCapturerState *)state
session:(SCVideoCaptureSessionInfo)session;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didGetError:(NSError *)error
forType:(SCManagedVideoCapturerInfoType)type
session:(SCVideoCaptureSessionInfo)session;
- (void)managedCapturerDidCallLenseResume:(id<SCCapturer>)managedCapturer session:(SCVideoCaptureSessionInfo)session;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata;
// Photo methods
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
willCapturePhoto:(SCManagedCapturerState *)state
sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didCapturePhoto:(SCManagedCapturerState *)state;
- (BOOL)managedCapturer:(id<SCCapturer>)managedCapturer isUnderDeviceMotion:(SCManagedCapturerState *)state;
- (BOOL)managedCapturer:(id<SCCapturer>)managedCapturer shouldProcessFileInput:(SCManagedCapturerState *)state;
// Face detection
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didDetectFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeExposurePoint:(CGPoint)exposurePoint;
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFocusPoint:(CGPoint)focusPoint;
@end

View File

@ -0,0 +1,12 @@
// Generated by the announcer.rb DO NOT EDIT!!
#import "SCManagedCapturerListener.h"
#import <Foundation/Foundation.h>
@interface SCManagedCapturerListenerAnnouncer : NSObject <SCManagedCapturerListener>
- (BOOL)addListener:(id<SCManagedCapturerListener>)listener;
- (void)removeListener:(id<SCManagedCapturerListener>)listener;
@end

View File

@ -0,0 +1,505 @@
// Generated by the announcer.rb DO NOT EDIT!!
#import "SCManagedCapturerListenerAnnouncer.h"
#include <mutex>
using std::lock_guard;
using std::mutex;
#include <vector>
using std::find;
using std::make_shared;
using std::shared_ptr;
using std::vector;
@implementation SCManagedCapturerListenerAnnouncer {
mutex _mutex;
shared_ptr<vector<__weak id<SCManagedCapturerListener>>> _listeners;
}
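// Listener storage is copy-on-write: addListener/removeListener build a fresh vector under _mutex and publish
// it with atomic_store, so the announcement methods below can read the list lock-free via atomic_load.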
- (NSString *)description
{
auto listeners = atomic_load(&self->_listeners);
NSMutableString *desc = [NSMutableString string];
[desc appendFormat:@"<SCManagedCapturerListenerAnnouncer %p>: [", self];
for (int i = 0; i < listeners->size(); ++i) {
[desc appendFormat:@"%@", (*listeners)[i]];
if (i != listeners->size() - 1) {
[desc appendString:@", "];
}
}
[desc appendString:@"]"];
return desc;
}
- (BOOL)addListener:(id<SCManagedCapturerListener>)listener
{
lock_guard<mutex> lock(_mutex);
auto listeners = make_shared<vector<__weak id<SCManagedCapturerListener>>>();
if (_listeners != nil) {
// The listener we want to add already exists
if (find(_listeners->begin(), _listeners->end(), listener) != _listeners->end()) {
return NO;
}
for (auto &one : *_listeners) {
if (one != nil) {
listeners->push_back(one);
}
}
listeners->push_back(listener);
atomic_store(&self->_listeners, listeners);
} else {
listeners->push_back(listener);
atomic_store(&self->_listeners, listeners);
}
return YES;
}
- (void)removeListener:(id<SCManagedCapturerListener>)listener
{
lock_guard<mutex> lock(_mutex);
if (_listeners == nil) {
return;
}
// If the only item in the listener list is the one we want to remove, store it back to nil again
if (_listeners->size() == 1 && (*_listeners)[0] == listener) {
atomic_store(&self->_listeners, shared_ptr<vector<__weak id<SCManagedCapturerListener>>>());
return;
}
auto listeners = make_shared<vector<__weak id<SCManagedCapturerListener>>>();
for (auto &one : *_listeners) {
if (one != nil && one != listener) {
listeners->push_back(one);
}
}
atomic_store(&self->_listeners, listeners);
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didStartRunning:(SCManagedCapturerState *)state
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didStartRunning:)]) {
[listener managedCapturer:managedCapturer didStartRunning:state];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didStopRunning:(SCManagedCapturerState *)state
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didStopRunning:)]) {
[listener managedCapturer:managedCapturer didStopRunning:state];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didResetFromRuntimeError:(SCManagedCapturerState *)state
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didResetFromRuntimeError:)]) {
[listener managedCapturer:managedCapturer didResetFromRuntimeError:state];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeState:(SCManagedCapturerState *)state
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didChangeState:)]) {
[listener managedCapturer:managedCapturer didChangeState:state];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeNightModeActive:(SCManagedCapturerState *)state
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didChangeNightModeActive:)]) {
[listener managedCapturer:managedCapturer didChangeNightModeActive:state];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangePortraitModeActive:(SCManagedCapturerState *)state
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didChangePortraitModeActive:)]) {
[listener managedCapturer:managedCapturer didChangePortraitModeActive:state];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFlashActive:(SCManagedCapturerState *)state
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashActive:)]) {
[listener managedCapturer:managedCapturer didChangeFlashActive:state];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeLensesActive:(SCManagedCapturerState *)state
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didChangeLensesActive:)]) {
[listener managedCapturer:managedCapturer didChangeLensesActive:state];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeARSessionActive:(SCManagedCapturerState *)state
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didChangeARSessionActive:)]) {
[listener managedCapturer:managedCapturer didChangeARSessionActive:state];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didChangeFlashSupportedAndTorchSupported:(SCManagedCapturerState *)state
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashSupportedAndTorchSupported:)]) {
[listener managedCapturer:managedCapturer didChangeFlashSupportedAndTorchSupported:state];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeZoomFactor:(SCManagedCapturerState *)state
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didChangeZoomFactor:)]) {
[listener managedCapturer:managedCapturer didChangeZoomFactor:state];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeLowLightCondition:(SCManagedCapturerState *)state
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didChangeLowLightCondition:)]) {
[listener managedCapturer:managedCapturer didChangeLowLightCondition:state];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didChangeAdjustingExposure:)]) {
[listener managedCapturer:managedCapturer didChangeAdjustingExposure:state];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeCaptureDevicePosition:(SCManagedCapturerState *)state
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didChangeCaptureDevicePosition:)]) {
[listener managedCapturer:managedCapturer didChangeCaptureDevicePosition:state];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewLayer:)]) {
[listener managedCapturer:managedCapturer didChangeVideoPreviewLayer:videoPreviewLayer];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeVideoPreviewGLView:(LSAGLView *)videoPreviewGLView
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewGLView:)]) {
[listener managedCapturer:managedCapturer didChangeVideoPreviewGLView:videoPreviewGLView];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didBeginVideoRecording:(SCManagedCapturerState *)state
session:(SCVideoCaptureSessionInfo)session
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didBeginVideoRecording:session:)]) {
[listener managedCapturer:managedCapturer didBeginVideoRecording:state session:session];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didBeginAudioRecording:(SCManagedCapturerState *)state
session:(SCVideoCaptureSessionInfo)session
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didBeginAudioRecording:session:)]) {
[listener managedCapturer:managedCapturer didBeginAudioRecording:state session:session];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
willFinishRecording:(SCManagedCapturerState *)state
session:(SCVideoCaptureSessionInfo)session
recordedVideoFuture:(SCFuture<id<SCManagedRecordedVideo>> *)recordedVideoFuture
videoSize:(CGSize)videoSize
placeholderImage:(UIImage *)placeholderImage
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:
willFinishRecording:
session:
recordedVideoFuture:
videoSize:
placeholderImage:)]) {
[listener managedCapturer:managedCapturer
willFinishRecording:state
session:session
recordedVideoFuture:recordedVideoFuture
videoSize:videoSize
placeholderImage:placeholderImage];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didFinishRecording:(SCManagedCapturerState *)state
session:(SCVideoCaptureSessionInfo)session
recordedVideo:(SCManagedRecordedVideo *)recordedVideo
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didFinishRecording:session:recordedVideo:)]) {
[listener managedCapturer:managedCapturer
didFinishRecording:state
session:session
recordedVideo:recordedVideo];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didFailRecording:(SCManagedCapturerState *)state
session:(SCVideoCaptureSessionInfo)session
error:(NSError *)error
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didFailRecording:session:error:)]) {
[listener managedCapturer:managedCapturer didFailRecording:state session:session error:error];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didCancelRecording:(SCManagedCapturerState *)state
session:(SCVideoCaptureSessionInfo)session
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didCancelRecording:session:)]) {
[listener managedCapturer:managedCapturer didCancelRecording:state session:session];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didGetError:(NSError *)error
forType:(SCManagedVideoCapturerInfoType)type
session:(SCVideoCaptureSessionInfo)session
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didGetError:forType:session:)]) {
[listener managedCapturer:managedCapturer didGetError:error forType:type session:session];
}
}
}
}
- (void)managedCapturerDidCallLenseResume:(id<SCCapturer>)managedCapturer session:(SCVideoCaptureSessionInfo)session
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturerDidCallLenseResume:session:)]) {
[listener managedCapturerDidCallLenseResume:managedCapturer session:session];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didAppendVideoSampleBuffer:sampleMetadata:)]) {
[listener managedCapturer:managedCapturer
didAppendVideoSampleBuffer:sampleBuffer
sampleMetadata:sampleMetadata];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
willCapturePhoto:(SCManagedCapturerState *)state
sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:willCapturePhoto:sampleMetadata:)]) {
[listener managedCapturer:managedCapturer willCapturePhoto:state sampleMetadata:sampleMetadata];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didCapturePhoto:(SCManagedCapturerState *)state
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didCapturePhoto:)]) {
[listener managedCapturer:managedCapturer didCapturePhoto:state];
}
}
}
}
- (BOOL)managedCapturer:(id<SCCapturer>)managedCapturer isUnderDeviceMotion:(SCManagedCapturerState *)state
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:isUnderDeviceMotion:)]) {
return [listener managedCapturer:managedCapturer isUnderDeviceMotion:state];
}
}
}
return NO;
}
- (BOOL)managedCapturer:(id<SCCapturer>)managedCapturer shouldProcessFileInput:(SCManagedCapturerState *)state
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:shouldProcessFileInput:)]) {
return [listener managedCapturer:managedCapturer shouldProcessFileInput:state];
}
}
}
return NO;
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer
didDetectFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didDetectFaceBounds:)]) {
[listener managedCapturer:managedCapturer didDetectFaceBounds:faceBoundsByFaceID];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeExposurePoint:(CGPoint)exposurePoint
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didChangeExposurePoint:)]) {
[listener managedCapturer:managedCapturer didChangeExposurePoint:exposurePoint];
}
}
}
}
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFocusPoint:(CGPoint)focusPoint
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedCapturerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedCapturer:didChangeFocusPoint:)]) {
[listener managedCapturer:managedCapturer didChangeFocusPoint:focusPoint];
}
}
}
}
@end

View File

@ -0,0 +1,26 @@
//
// SCRecordingMetadata.h
// Snapchat
//
#import <SCBase/SCMacros.h>
#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
@interface SCManagedCapturerSampleMetadata : NSObject
SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithPresentationTimestamp:(CMTime)presentationTimestamp
fieldOfView:(float)fieldOfView NS_DESIGNATED_INITIALIZER;
@property (nonatomic, readonly) CMTime presentationTimestamp;
@property (nonatomic, readonly) float fieldOfView;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,24 @@
//
// SCRecordingMetadata.m
// Snapchat
//
#import "SCManagedCapturerSampleMetadata.h"
NS_ASSUME_NONNULL_BEGIN
@implementation SCManagedCapturerSampleMetadata
- (instancetype)initWithPresentationTimestamp:(CMTime)presentationTimestamp fieldOfView:(float)fieldOfView
{
self = [super init];
if (self) {
_presentationTimestamp = presentationTimestamp;
_fieldOfView = fieldOfView;
}
return self;
}
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,93 @@
// 49126048c3d19dd5b676b8d39844cf133833b67a
// Generated by the value-object.rb DO NOT EDIT!!
#import "SCManagedCaptureDevice.h"
#import <AvailabilityMacros.h>
#import <CoreGraphics/CoreGraphics.h>
#import <Foundation/Foundation.h>
@protocol SCManagedCapturerState <NSObject, NSCoding, NSCopying>
@property (nonatomic, assign, readonly) BOOL isRunning;
@property (nonatomic, assign, readonly) BOOL isNightModeActive;
@property (nonatomic, assign, readonly) BOOL isPortraitModeActive;
@property (nonatomic, assign, readonly) BOOL lowLightCondition;
@property (nonatomic, assign, readonly) BOOL adjustingExposure;
@property (nonatomic, assign, readonly) SCManagedCaptureDevicePosition devicePosition;
@property (nonatomic, assign, readonly) CGFloat zoomFactor;
@property (nonatomic, assign, readonly) BOOL flashSupported;
@property (nonatomic, assign, readonly) BOOL torchSupported;
@property (nonatomic, assign, readonly) BOOL flashActive;
@property (nonatomic, assign, readonly) BOOL torchActive;
@property (nonatomic, assign, readonly) BOOL lensesActive;
@property (nonatomic, assign, readonly) BOOL arSessionActive;
@property (nonatomic, assign, readonly) BOOL liveVideoStreaming;
@property (nonatomic, assign, readonly) BOOL lensProcessorReady;
@end
@interface SCManagedCapturerState : NSObject <SCManagedCapturerState>
@property (nonatomic, assign, readonly) BOOL isRunning;
@property (nonatomic, assign, readonly) BOOL isNightModeActive;
@property (nonatomic, assign, readonly) BOOL isPortraitModeActive;
@property (nonatomic, assign, readonly) BOOL lowLightCondition;
@property (nonatomic, assign, readonly) BOOL adjustingExposure;
@property (nonatomic, assign, readonly) SCManagedCaptureDevicePosition devicePosition;
@property (nonatomic, assign, readonly) CGFloat zoomFactor;
@property (nonatomic, assign, readonly) BOOL flashSupported;
@property (nonatomic, assign, readonly) BOOL torchSupported;
@property (nonatomic, assign, readonly) BOOL flashActive;
@property (nonatomic, assign, readonly) BOOL torchActive;
@property (nonatomic, assign, readonly) BOOL lensesActive;
@property (nonatomic, assign, readonly) BOOL arSessionActive;
@property (nonatomic, assign, readonly) BOOL liveVideoStreaming;
@property (nonatomic, assign, readonly) BOOL lensProcessorReady;
- (instancetype)initWithIsRunning:(BOOL)isRunning
isNightModeActive:(BOOL)isNightModeActive
isPortraitModeActive:(BOOL)isPortraitModeActive
lowLightCondition:(BOOL)lowLightCondition
adjustingExposure:(BOOL)adjustingExposure
devicePosition:(SCManagedCaptureDevicePosition)devicePosition
zoomFactor:(CGFloat)zoomFactor
flashSupported:(BOOL)flashSupported
torchSupported:(BOOL)torchSupported
flashActive:(BOOL)flashActive
torchActive:(BOOL)torchActive
lensesActive:(BOOL)lensesActive
arSessionActive:(BOOL)arSessionActive
liveVideoStreaming:(BOOL)liveVideoStreaming
lensProcessorReady:(BOOL)lensProcessorReady;
@end

View File

@ -0,0 +1,359 @@
// 49126048c3d19dd5b676b8d39844cf133833b67a
// Generated by the value-object.rb DO NOT EDIT!!
#import "SCManagedCapturerState.h"
#import <SCFoundation/SCValueObjectHelpers.h>
#import <FastCoding/FastCoder.h>
@implementation SCManagedCapturerState
static ptrdiff_t sSCManagedCapturerStateOffsets[0];
static BOOL sSCManagedCapturerStateHasOffsets;
- (instancetype)initWithIsRunning:(BOOL)isRunning
isNightModeActive:(BOOL)isNightModeActive
isPortraitModeActive:(BOOL)isPortraitModeActive
lowLightCondition:(BOOL)lowLightCondition
adjustingExposure:(BOOL)adjustingExposure
devicePosition:(SCManagedCaptureDevicePosition)devicePosition
zoomFactor:(CGFloat)zoomFactor
flashSupported:(BOOL)flashSupported
torchSupported:(BOOL)torchSupported
flashActive:(BOOL)flashActive
torchActive:(BOOL)torchActive
lensesActive:(BOOL)lensesActive
arSessionActive:(BOOL)arSessionActive
liveVideoStreaming:(BOOL)liveVideoStreaming
lensProcessorReady:(BOOL)lensProcessorReady
{
self = [super init];
if (self) {
_isRunning = isRunning;
_isNightModeActive = isNightModeActive;
_isPortraitModeActive = isPortraitModeActive;
_lowLightCondition = lowLightCondition;
_adjustingExposure = adjustingExposure;
_devicePosition = devicePosition;
_zoomFactor = zoomFactor;
_flashSupported = flashSupported;
_torchSupported = torchSupported;
_flashActive = flashActive;
_torchActive = torchActive;
_lensesActive = lensesActive;
_arSessionActive = arSessionActive;
_liveVideoStreaming = liveVideoStreaming;
_lensProcessorReady = lensProcessorReady;
}
return self;
}
#pragma mark - NSCopying
- (instancetype)copyWithZone:(NSZone *)zone
{
// Immutable object, bypass copy
return self;
}
#pragma mark - NSCoding
- (instancetype)initWithCoder:(NSCoder *)aDecoder
{
self = [super init];
if (self) {
_isRunning = [aDecoder decodeBoolForKey:@"isRunning"];
_isNightModeActive = [aDecoder decodeBoolForKey:@"isNightModeActive"];
_isPortraitModeActive = [aDecoder decodeBoolForKey:@"isPortraitModeActive"];
_lowLightCondition = [aDecoder decodeBoolForKey:@"lowLightCondition"];
_adjustingExposure = [aDecoder decodeBoolForKey:@"adjustingExposure"];
_devicePosition = (SCManagedCaptureDevicePosition)[aDecoder decodeIntegerForKey:@"devicePosition"];
_zoomFactor = [aDecoder decodeFloatForKey:@"zoomFactor"];
_flashSupported = [aDecoder decodeBoolForKey:@"flashSupported"];
_torchSupported = [aDecoder decodeBoolForKey:@"torchSupported"];
_flashActive = [aDecoder decodeBoolForKey:@"flashActive"];
_torchActive = [aDecoder decodeBoolForKey:@"torchActive"];
_lensesActive = [aDecoder decodeBoolForKey:@"lensesActive"];
_arSessionActive = [aDecoder decodeBoolForKey:@"arSessionActive"];
_liveVideoStreaming = [aDecoder decodeBoolForKey:@"liveVideoStreaming"];
_lensProcessorReady = [aDecoder decodeBoolForKey:@"lensProcessorReady"];
}
return self;
}
- (void)encodeWithCoder:(NSCoder *)aCoder
{
[aCoder encodeBool:_isRunning forKey:@"isRunning"];
[aCoder encodeBool:_isNightModeActive forKey:@"isNightModeActive"];
[aCoder encodeBool:_isPortraitModeActive forKey:@"isPortraitModeActive"];
[aCoder encodeBool:_lowLightCondition forKey:@"lowLightCondition"];
[aCoder encodeBool:_adjustingExposure forKey:@"adjustingExposure"];
[aCoder encodeInteger:(NSInteger)_devicePosition forKey:@"devicePosition"];
[aCoder encodeFloat:_zoomFactor forKey:@"zoomFactor"];
[aCoder encodeBool:_flashSupported forKey:@"flashSupported"];
[aCoder encodeBool:_torchSupported forKey:@"torchSupported"];
[aCoder encodeBool:_flashActive forKey:@"flashActive"];
[aCoder encodeBool:_torchActive forKey:@"torchActive"];
[aCoder encodeBool:_lensesActive forKey:@"lensesActive"];
[aCoder encodeBool:_arSessionActive forKey:@"arSessionActive"];
[aCoder encodeBool:_liveVideoStreaming forKey:@"liveVideoStreaming"];
[aCoder encodeBool:_lensProcessorReady forKey:@"lensProcessorReady"];
}
#pragma mark - FasterCoding
- (BOOL)preferFasterCoding
{
return YES;
}
- (void)encodeWithFasterCoder:(id<FCFasterCoder>)fasterCoder
{
[fasterCoder encodeBool:_adjustingExposure];
[fasterCoder encodeBool:_arSessionActive];
[fasterCoder encodeSInt32:_devicePosition];
[fasterCoder encodeBool:_flashActive];
[fasterCoder encodeBool:_flashSupported];
[fasterCoder encodeBool:_isNightModeActive];
[fasterCoder encodeBool:_isPortraitModeActive];
[fasterCoder encodeBool:_isRunning];
[fasterCoder encodeBool:_lensProcessorReady];
[fasterCoder encodeBool:_lensesActive];
[fasterCoder encodeBool:_liveVideoStreaming];
[fasterCoder encodeBool:_lowLightCondition];
[fasterCoder encodeBool:_torchActive];
[fasterCoder encodeBool:_torchSupported];
[fasterCoder encodeFloat64:_zoomFactor];
}
- (void)decodeWithFasterDecoder:(id<FCFasterDecoder>)fasterDecoder
{
_adjustingExposure = (BOOL)[fasterDecoder decodeBool];
_arSessionActive = (BOOL)[fasterDecoder decodeBool];
_devicePosition = (SCManagedCaptureDevicePosition)[fasterDecoder decodeSInt32];
_flashActive = (BOOL)[fasterDecoder decodeBool];
_flashSupported = (BOOL)[fasterDecoder decodeBool];
_isNightModeActive = (BOOL)[fasterDecoder decodeBool];
_isPortraitModeActive = (BOOL)[fasterDecoder decodeBool];
_isRunning = (BOOL)[fasterDecoder decodeBool];
_lensProcessorReady = (BOOL)[fasterDecoder decodeBool];
_lensesActive = (BOOL)[fasterDecoder decodeBool];
_liveVideoStreaming = (BOOL)[fasterDecoder decodeBool];
_lowLightCondition = (BOOL)[fasterDecoder decodeBool];
_torchActive = (BOOL)[fasterDecoder decodeBool];
_torchSupported = (BOOL)[fasterDecoder decodeBool];
_zoomFactor = (CGFloat)[fasterDecoder decodeFloat64];
}
- (void)setBool:(BOOL)val forUInt64Key:(uint64_t)key
{
switch (key) {
case 15633755733674300ULL:
_adjustingExposure = (BOOL)val;
break;
case 11461798188076803ULL:
_arSessionActive = (BOOL)val;
break;
case 12833337784991002ULL:
_flashActive = (BOOL)val;
break;
case 51252237764061994ULL:
_flashSupported = (BOOL)val;
break;
case 1498048848502287ULL:
_isNightModeActive = (BOOL)val;
break;
case 56151582267629469ULL:
_isPortraitModeActive = (BOOL)val;
break;
case 12346172623874083ULL:
_isRunning = (BOOL)val;
break;
case 67168377441917657ULL:
_lensProcessorReady = (BOOL)val;
break;
case 5791542045168142ULL:
_lensesActive = (BOOL)val;
break;
case 28486888710545224ULL:
_liveVideoStreaming = (BOOL)val;
break;
case 24071673583499455ULL:
_lowLightCondition = (BOOL)val;
break;
case 40774429934225315ULL:
_torchActive = (BOOL)val;
break;
case 41333098301057670ULL:
_torchSupported = (BOOL)val;
break;
}
}
- (void)setSInt32:(int32_t)val forUInt64Key:(uint64_t)key
{
switch (key) {
case 66264093189780655ULL:
_devicePosition = (SCManagedCaptureDevicePosition)val;
break;
}
}
- (void)setFloat64:(double)val forUInt64Key:(uint64_t)key
{
switch (key) {
case 61340640993537628ULL:
_zoomFactor = (CGFloat)val;
break;
}
}
+ (uint64_t)fasterCodingVersion
{
return 10319810232046341562ULL;
}
+ (uint64_t *)fasterCodingKeys
{
static uint64_t keys[] = {
15 /* Total */,
FC_ENCODE_KEY_TYPE(15633755733674300, FCEncodeTypeBool),
FC_ENCODE_KEY_TYPE(11461798188076803, FCEncodeTypeBool),
FC_ENCODE_KEY_TYPE(66264093189780655, FCEncodeTypeSInt32),
FC_ENCODE_KEY_TYPE(12833337784991002, FCEncodeTypeBool),
FC_ENCODE_KEY_TYPE(51252237764061994, FCEncodeTypeBool),
FC_ENCODE_KEY_TYPE(1498048848502287, FCEncodeTypeBool),
FC_ENCODE_KEY_TYPE(56151582267629469, FCEncodeTypeBool),
FC_ENCODE_KEY_TYPE(12346172623874083, FCEncodeTypeBool),
FC_ENCODE_KEY_TYPE(67168377441917657, FCEncodeTypeBool),
FC_ENCODE_KEY_TYPE(5791542045168142, FCEncodeTypeBool),
FC_ENCODE_KEY_TYPE(28486888710545224, FCEncodeTypeBool),
FC_ENCODE_KEY_TYPE(24071673583499455, FCEncodeTypeBool),
FC_ENCODE_KEY_TYPE(40774429934225315, FCEncodeTypeBool),
FC_ENCODE_KEY_TYPE(41333098301057670, FCEncodeTypeBool),
FC_ENCODE_KEY_TYPE(61340640993537628, FCEncodeTypeFloat64),
};
return keys;
}
#pragma mark - isEqual
- (BOOL)isEqual:(id)object
{
if (!SCObjectsIsEqual(self, object, &sSCManagedCapturerStateHasOffsets, sSCManagedCapturerStateOffsets, 15, 0)) {
return NO;
}
SCManagedCapturerState *other = (SCManagedCapturerState *)object;
if (other->_isRunning != _isRunning) {
return NO;
}
if (other->_isNightModeActive != _isNightModeActive) {
return NO;
}
if (other->_isPortraitModeActive != _isPortraitModeActive) {
return NO;
}
if (other->_lowLightCondition != _lowLightCondition) {
return NO;
}
if (other->_adjustingExposure != _adjustingExposure) {
return NO;
}
if (other->_devicePosition != _devicePosition) {
return NO;
}
if (other->_zoomFactor != _zoomFactor) {
return NO;
}
if (other->_flashSupported != _flashSupported) {
return NO;
}
if (other->_torchSupported != _torchSupported) {
return NO;
}
if (other->_flashActive != _flashActive) {
return NO;
}
if (other->_torchActive != _torchActive) {
return NO;
}
if (other->_lensesActive != _lensesActive) {
return NO;
}
if (other->_arSessionActive != _arSessionActive) {
return NO;
}
if (other->_liveVideoStreaming != _liveVideoStreaming) {
return NO;
}
if (other->_lensProcessorReady != _lensProcessorReady) {
return NO;
}
return YES;
}
- (NSUInteger)hash
{
NSUInteger subhashes[] = {
(NSUInteger)_isRunning, (NSUInteger)_isNightModeActive, (NSUInteger)_isPortraitModeActive,
(NSUInteger)_lowLightCondition, (NSUInteger)_adjustingExposure, (NSUInteger)_devicePosition,
(NSUInteger)_zoomFactor, (NSUInteger)_flashSupported, (NSUInteger)_torchSupported,
(NSUInteger)_flashActive, (NSUInteger)_torchActive, (NSUInteger)_lensesActive,
(NSUInteger)_arSessionActive, (NSUInteger)_liveVideoStreaming, (NSUInteger)_lensProcessorReady};
NSUInteger result = subhashes[0];
for (int i = 1; i < 15; i++) {
unsigned long long base = (((unsigned long long)result) << 32 | subhashes[i]);
base = (~base) + (base << 18);
base ^= (base >> 31);
base *= 21;
base ^= (base >> 11);
base += (base << 6);
base ^= (base >> 22);
result = (NSUInteger)base;
}
return result;
}
#pragma mark - Print description in console: lldb> po #{variable name}
- (NSString *)description
{
NSMutableString *desc = [NSMutableString string];
[desc appendString:@"{\n"];
[desc appendFormat:@"\tisRunning:%@\n", [@(_isRunning) description]];
[desc appendFormat:@"\tisNightModeActive:%@\n", [@(_isNightModeActive) description]];
[desc appendFormat:@"\tisPortraitModeActive:%@\n", [@(_isPortraitModeActive) description]];
[desc appendFormat:@"\tlowLightCondition:%@\n", [@(_lowLightCondition) description]];
[desc appendFormat:@"\tadjustingExposure:%@\n", [@(_adjustingExposure) description]];
[desc appendFormat:@"\tdevicePosition:%@\n", [@(_devicePosition) description]];
[desc appendFormat:@"\tzoomFactor:%@\n", [@(_zoomFactor) description]];
[desc appendFormat:@"\tflashSupported:%@\n", [@(_flashSupported) description]];
[desc appendFormat:@"\ttorchSupported:%@\n", [@(_torchSupported) description]];
[desc appendFormat:@"\tflashActive:%@\n", [@(_flashActive) description]];
[desc appendFormat:@"\ttorchActive:%@\n", [@(_torchActive) description]];
[desc appendFormat:@"\tlensesActive:%@\n", [@(_lensesActive) description]];
[desc appendFormat:@"\tarSessionActive:%@\n", [@(_arSessionActive) description]];
[desc appendFormat:@"\tliveVideoStreaming:%@\n", [@(_liveVideoStreaming) description]];
[desc appendFormat:@"\tlensProcessorReady:%@\n", [@(_lensProcessorReady) description]];
[desc appendString:@"}\n"];
return [desc copy];
}
@end

View File

@ -0,0 +1,20 @@
#import <CoreGraphics/CoreGraphics.h>
#import "SCManagedCaptureDevice.h"
interface SCManagedCapturerState
BOOL isRunning
BOOL isNightModeActive
BOOL isPortraitModeActive
BOOL lowLightCondition
BOOL adjustingExposure
enum SCManagedCaptureDevicePosition devicePosition
CGFloat zoomFactor
BOOL flashSupported
BOOL torchSupported
BOOL flashActive
BOOL torchActive
BOOL lensesActive
BOOL arSessionActive
BOOL liveVideoStreaming
BOOL lensProcessorReady
end

View File

@ -0,0 +1,46 @@
// 49126048c3d19dd5b676b8d39844cf133833b67a
// Generated by the value-object.rb DO NOT EDIT!!
#import "SCManagedCapturerState.h"
#import <AvailabilityMacros.h>
#import <Foundation/Foundation.h>
@interface SCManagedCapturerStateBuilder : NSObject
+ (instancetype)withManagedCapturerState:(id<SCManagedCapturerState>)managedCapturerState;
- (SCManagedCapturerState *)build;
- (instancetype)setIsRunning:(BOOL)isRunning;
- (instancetype)setIsNightModeActive:(BOOL)isNightModeActive;
- (instancetype)setIsPortraitModeActive:(BOOL)isPortraitModeActive;
- (instancetype)setLowLightCondition:(BOOL)lowLightCondition;
- (instancetype)setAdjustingExposure:(BOOL)adjustingExposure;
- (instancetype)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition;
- (instancetype)setZoomFactor:(CGFloat)zoomFactor;
- (instancetype)setFlashSupported:(BOOL)flashSupported;
- (instancetype)setTorchSupported:(BOOL)torchSupported;
- (instancetype)setFlashActive:(BOOL)flashActive;
- (instancetype)setTorchActive:(BOOL)torchActive;
- (instancetype)setLensesActive:(BOOL)lensesActive;
- (instancetype)setArSessionActive:(BOOL)arSessionActive;
- (instancetype)setLiveVideoStreaming:(BOOL)liveVideoStreaming;
- (instancetype)setLensProcessorReady:(BOOL)lensProcessorReady;
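// Illustrative usage: derive a new immutable state from an existing one, e.g.
//
//   SCManagedCapturerState *newState =
//       [[[SCManagedCapturerStateBuilder withManagedCapturerState:oldState] setFlashActive:YES] build];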
@end

View File

@ -0,0 +1,158 @@
// 49126048c3d19dd5b676b8d39844cf133833b67a
// Generated by the value-object.rb DO NOT EDIT!!
#import "SCManagedCapturerStateBuilder.h"
#import <SCFoundation/SCValueObjectHelpers.h>
#import <FastCoding/FastCoder.h>
@implementation SCManagedCapturerStateBuilder {
BOOL _isRunning;
BOOL _isNightModeActive;
BOOL _isPortraitModeActive;
BOOL _lowLightCondition;
BOOL _adjustingExposure;
SCManagedCaptureDevicePosition _devicePosition;
CGFloat _zoomFactor;
BOOL _flashSupported;
BOOL _torchSupported;
BOOL _flashActive;
BOOL _torchActive;
BOOL _lensesActive;
BOOL _arSessionActive;
BOOL _liveVideoStreaming;
BOOL _lensProcessorReady;
}
+ (instancetype)withManagedCapturerState:(id<SCManagedCapturerState>)managedCapturerState
{
SCManagedCapturerStateBuilder *builder = [[SCManagedCapturerStateBuilder alloc] init];
builder->_isRunning = managedCapturerState.isRunning;
builder->_isNightModeActive = managedCapturerState.isNightModeActive;
builder->_isPortraitModeActive = managedCapturerState.isPortraitModeActive;
builder->_lowLightCondition = managedCapturerState.lowLightCondition;
builder->_adjustingExposure = managedCapturerState.adjustingExposure;
builder->_devicePosition = managedCapturerState.devicePosition;
builder->_zoomFactor = managedCapturerState.zoomFactor;
builder->_flashSupported = managedCapturerState.flashSupported;
builder->_torchSupported = managedCapturerState.torchSupported;
builder->_flashActive = managedCapturerState.flashActive;
builder->_torchActive = managedCapturerState.torchActive;
builder->_lensesActive = managedCapturerState.lensesActive;
builder->_arSessionActive = managedCapturerState.arSessionActive;
builder->_liveVideoStreaming = managedCapturerState.liveVideoStreaming;
builder->_lensProcessorReady = managedCapturerState.lensProcessorReady;
return builder;
}
- (SCManagedCapturerState *)build
{
return [[SCManagedCapturerState alloc] initWithIsRunning:_isRunning
isNightModeActive:_isNightModeActive
isPortraitModeActive:_isPortraitModeActive
lowLightCondition:_lowLightCondition
adjustingExposure:_adjustingExposure
devicePosition:_devicePosition
zoomFactor:_zoomFactor
flashSupported:_flashSupported
torchSupported:_torchSupported
flashActive:_flashActive
torchActive:_torchActive
lensesActive:_lensesActive
arSessionActive:_arSessionActive
liveVideoStreaming:_liveVideoStreaming
lensProcessorReady:_lensProcessorReady];
}
- (instancetype)setIsRunning:(BOOL)isRunning
{
_isRunning = isRunning;
return self;
}
- (instancetype)setIsNightModeActive:(BOOL)isNightModeActive
{
_isNightModeActive = isNightModeActive;
return self;
}
- (instancetype)setIsPortraitModeActive:(BOOL)isPortraitModeActive
{
_isPortraitModeActive = isPortraitModeActive;
return self;
}
- (instancetype)setLowLightCondition:(BOOL)lowLightCondition
{
_lowLightCondition = lowLightCondition;
return self;
}
- (instancetype)setAdjustingExposure:(BOOL)adjustingExposure
{
_adjustingExposure = adjustingExposure;
return self;
}
- (instancetype)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
_devicePosition = devicePosition;
return self;
}
- (instancetype)setZoomFactor:(CGFloat)zoomFactor
{
_zoomFactor = zoomFactor;
return self;
}
- (instancetype)setFlashSupported:(BOOL)flashSupported
{
_flashSupported = flashSupported;
return self;
}
- (instancetype)setTorchSupported:(BOOL)torchSupported
{
_torchSupported = torchSupported;
return self;
}
- (instancetype)setFlashActive:(BOOL)flashActive
{
_flashActive = flashActive;
return self;
}
- (instancetype)setTorchActive:(BOOL)torchActive
{
_torchActive = torchActive;
return self;
}
- (instancetype)setLensesActive:(BOOL)lensesActive
{
_lensesActive = lensesActive;
return self;
}
- (instancetype)setArSessionActive:(BOOL)arSessionActive
{
_arSessionActive = arSessionActive;
return self;
}
- (instancetype)setLiveVideoStreaming:(BOOL)liveVideoStreaming
{
_liveVideoStreaming = liveVideoStreaming;
return self;
}
- (instancetype)setLensProcessorReady:(BOOL)lensProcessorReady
{
_lensProcessorReady = lensProcessorReady;
return self;
}
@end

View File

@ -0,0 +1,36 @@
//
// SCManagedCapturerUtils.h
// Snapchat
//
// Created by Chao Pang on 10/4/17.
//
#import <SCBase/SCMacros.h>
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
SC_EXTERN_C_BEGIN
extern const CGFloat kSCIPhoneXCapturedImageVideoCropRatio;
extern CGFloat SCManagedCapturedImageAndVideoAspectRatio(void);
extern CGSize SCManagedCapturerAllScreenSize(void);
extern CGSize SCAsyncImageCapturePlaceholderViewSize(void);
extern CGFloat SCAdjustedAspectRatio(UIImageOrientation orientation, CGFloat aspectRatio);
extern UIImage *SCCropImageToTargetAspectRatio(UIImage *image, CGFloat targetAspectRatio);
extern void SCCropImageSizeToAspectRatio(size_t inputWidth, size_t inputHeight, UIImageOrientation orientation,
CGFloat aspectRatio, size_t *outputWidth, size_t *outputHeight);
extern BOOL SCNeedsCropImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio);
extern CGRect SCCalculateRectToCrop(size_t imageWidth, size_t imageHeight, size_t croppedWidth, size_t croppedHeight);
extern CGImageRef SCCreateCroppedImageToAspectRatio(CGImageRef image, UIImageOrientation orientation,
CGFloat aspectRatio);
SC_EXTERN_C_END
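// A usage sketch with a hypothetical captured still `capturedImage`; the crop is a no-op
// when the image already matches the target aspect ratio (see SCNeedsCropImageToAspectRatio).
// SCExampleCropToCaptureAspectRatio is an illustrative helper, not part of this header.
static inline UIImage *SCExampleCropToCaptureAspectRatio(UIImage *capturedImage)
{
    CGFloat aspectRatio = SCManagedCapturedImageAndVideoAspectRatio();
    return SCCropImageToTargetAspectRatio(capturedImage, aspectRatio);
}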

View File

@ -0,0 +1,153 @@
//
// SCManagedCapturerUtils.m
// Snapchat
//
// Created by Chao Pang on 10/4/17.
//
#import "SCManagedCapturerUtils.h"
#import "SCCaptureCommon.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCCoreGraphicsUtils.h>
#import <SCFoundation/SCDeviceName.h>
#import <SCFoundation/UIScreen+SCSafeAreaInsets.h>
// This is used to calculate the crop ratio for generating the image shown on the Preview page
// Check https://snapchat.quip.com/lU3kAoDxaAFG for our design.
const CGFloat kSCIPhoneXCapturedImageVideoCropRatio = (397.0 * 739.0) / (375.0 * 812.0);
CGFloat SCManagedCapturedImageAndVideoAspectRatio(void)
{
static dispatch_once_t onceToken;
static CGFloat aspectRatio;
dispatch_once(&onceToken, ^{
CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size;
UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets];
aspectRatio = SCSizeGetAspectRatio(
CGSizeMake(screenSize.width, screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom));
});
return aspectRatio;
}
CGSize SCManagedCapturerAllScreenSize(void)
{
static CGSize size;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size;
// This logic is complicated because we need to handle iPhone X properly.
// See https://snapchat.quip.com/lU3kAoDxaAFG for our design.
UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets];
UIEdgeInsets visualSafeInsets = [UIScreen sc_visualSafeInsets];
        // This really is just a coordinate computation:
        // We know that in preview, our size is (screenWidth, screenHeight - topInset - bottomInset).
        // We know that when the preview image is shown in the camera screen, its height is screenHeight - visualTopInset,
        // thus we need to figure out what the bleed-over width in the camera screen should be:
        // screenWidth * (screenHeight - visualTopInset) / (screenHeight - topInset - bottomInset)
size = CGSizeMake(roundf(screenSize.width * (screenSize.height - visualSafeInsets.top) /
(screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom)),
screenSize.height);
});
return size;
}
CGSize SCAsyncImageCapturePlaceholderViewSize(void)
{
static CGSize size;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size;
UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets];
UIEdgeInsets visualSafeInsets = [UIScreen sc_visualSafeInsets];
size = CGSizeMake(roundf((screenSize.height - visualSafeInsets.top) * screenSize.width /
(screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom)),
screenSize.height - visualSafeInsets.top);
});
return size;
}
CGFloat SCAdjustedAspectRatio(UIImageOrientation orientation, CGFloat aspectRatio)
{
SCCAssert(aspectRatio != kSCManagedCapturerAspectRatioUnspecified, @"");
switch (orientation) {
case UIImageOrientationLeft:
case UIImageOrientationRight:
case UIImageOrientationLeftMirrored:
case UIImageOrientationRightMirrored:
return 1.0 / aspectRatio;
default:
return aspectRatio;
}
}
UIImage *SCCropImageToTargetAspectRatio(UIImage *image, CGFloat targetAspectRatio)
{
if (SCNeedsCropImageToAspectRatio(image.CGImage, image.imageOrientation, targetAspectRatio)) {
CGImageRef croppedImageRef =
SCCreateCroppedImageToAspectRatio(image.CGImage, image.imageOrientation, targetAspectRatio);
UIImage *croppedImage =
[UIImage imageWithCGImage:croppedImageRef scale:image.scale orientation:image.imageOrientation];
CGImageRelease(croppedImageRef);
return croppedImage;
} else {
return image;
}
}
void SCCropImageSizeToAspectRatio(size_t inputWidth, size_t inputHeight, UIImageOrientation orientation,
CGFloat aspectRatio, size_t *outputWidth, size_t *outputHeight)
{
SCCAssert(outputWidth != NULL && outputHeight != NULL, @"");
aspectRatio = SCAdjustedAspectRatio(orientation, aspectRatio);
if (inputWidth > roundf(inputHeight * aspectRatio)) {
*outputHeight = inputHeight;
*outputWidth = roundf(*outputHeight * aspectRatio);
} else {
*outputWidth = inputWidth;
*outputHeight = roundf(*outputWidth / aspectRatio);
}
}
BOOL SCNeedsCropImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio)
{
if (aspectRatio == kSCManagedCapturerAspectRatioUnspecified) {
return NO;
}
aspectRatio = SCAdjustedAspectRatio(orientation, aspectRatio);
size_t width = CGImageGetWidth(image);
size_t height = CGImageGetHeight(image);
return (width != roundf(height * aspectRatio));
}
CGRect SCCalculateRectToCrop(size_t imageWidth, size_t imageHeight, size_t croppedWidth, size_t croppedHeight)
{
if ([SCDeviceName isIphoneX]) {
        // The x offset is pushed all the way over so the crop removes the top section but none of the bottom
CGFloat x = (imageWidth - croppedWidth);
// Crop y symmetrically.
CGFloat y = roundf((imageHeight - croppedHeight) / 2.0);
return CGRectMake(x, y, croppedWidth, croppedHeight);
}
return CGRectMake((imageWidth - croppedWidth) / 2, (imageHeight - croppedHeight) / 2, croppedWidth, croppedHeight);
}
CGImageRef SCCreateCroppedImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio)
{
SCCAssert(aspectRatio != kSCManagedCapturerAspectRatioUnspecified, @"");
size_t width = CGImageGetWidth(image);
size_t height = CGImageGetHeight(image);
size_t croppedWidth, croppedHeight;
if ([SCDeviceName isIphoneX]) {
size_t adjustedWidth = (size_t)(width * kSCIPhoneXCapturedImageVideoCropRatio);
size_t adjustedHeight = (size_t)(height * kSCIPhoneXCapturedImageVideoCropRatio);
SCCropImageSizeToAspectRatio(adjustedWidth, adjustedHeight, orientation, aspectRatio, &croppedWidth,
&croppedHeight);
} else {
SCCropImageSizeToAspectRatio(width, height, orientation, aspectRatio, &croppedWidth, &croppedHeight);
}
CGRect cropRect = SCCalculateRectToCrop(width, height, croppedWidth, croppedHeight);
return CGImageCreateWithImageInRect(image, cropRect);
}

View File

@ -0,0 +1,57 @@
//
// SCManagedCapturer.h
// Snapchat
//
// Created by Liu Liu on 4/20/15.
// Copyright (c) 2015 Liu Liu. All rights reserved.
//
#import "SCCaptureCommon.h"
#import "SCCapturer.h"
#import <SCFoundation/SCTraceODPCompatible.h>
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
/**
* Manage AVCaptureSession with SCManagedCapturerV1
*
 * In phantom, there are a lot of places where we use AVCaptureSession. However, since only one session can run at a
 * time per app, we need some kind of management for the capture session.
*
* SCManagedCapturerV1 manages the state of capture session in following ways:
*
 * All operations in SCManagedCapturerV1 are handled on a serial queue to ensure their ordering. All callbacks (either
 * on the listener or the completion handler) are on the main thread. The state of SCManagedCapturerV1 is conveniently
 * maintained in a SCManagedCapturerState object, which is immutable and can be passed across threads; it maintains a
 * consistent, if slightly delayed, view of the capture session (thus, the state delivered on the main thread may say
 * the current active device is the back camera while, on the serial queue, the active device has already switched to
 * the front camera. This is OK because state.devicePosition will be the back camera, together with all of its setup
 * at that time. Note that it is impossible to have an up-to-the-moment view of the state across threads without the
 * threads blocking each other).
*
 * For the main use cases, you set up the capturer, add the preview layer, and then can capture a still image
 * or record a video, and SCManagedCapturerV1 will do the rest (make sure it actually captures the image / video,
 * recover from errors, or set up our more advanced image / video post-processing).
*
* The key classes that drive the recording flow are SCManagedVideoStreamer and SCManagedVideoFileStreamer which
* conform to SCManagedVideoDataSource. They will stream images to consumers conforming to
* SCManagedVideoDataSourceListener
* such as SCManagedLensesProcessor, SCManagedDeviceCapacityAnalyzer, SCManagedVideoScanner and ultimately
* SCManagedVideoCapturer and SCManagedStillImageCapturer which record the final output.
*
*/
@class SCCaptureResource;
extern NSString *const kSCLensesTweaksDidChangeFileInput;
@interface SCManagedCapturerV1 : NSObject <SCCapturer, SCTimeProfilable>
+ (SCManagedCapturerV1 *)sharedInstance;
/*
 The following APIs are reserved for use only by SCCaptureCore (aka managedCapturerV2).
 */
- (instancetype)initWithResource:(SCCaptureResource *)resource;
@end
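// An illustrative sketch of the threading contract described above, not an API of this
// header: listener callbacks arrive on the main thread with an immutable state snapshot,
// so a hypothetical SCManagedCapturerListener implementation can read it and update UI
// directly (the exact parameter types live in SCManagedCapturerListener.h, not shown here).
// The snapshot may lag the serial queue slightly, which is acceptable for UI decisions.
//
//   - (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeState:(SCManagedCapturerState *)state
//   {
//       self.flashButton.hidden = !state.flashSupported;
//   }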

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,20 @@
//
// SCManagedCapturerV1_Private.h
// Snapchat
//
// Created by Jingtian Yang on 20/12/2017.
//
#import "SCManagedCapturerV1.h"
@interface SCManagedCapturerV1 ()
- (SCCaptureResource *)captureResource;
- (void)setupWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
completionHandler:(dispatch_block_t)completionHandler;
- (BOOL)stopRunningWithCaptureToken:(SCCapturerToken *)token
completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
context:(NSString *)context;
@end

View File

@ -0,0 +1,32 @@
//
// SCManagedDeviceCapacityAnalyzer.h
// Snapchat
//
// Created by Liu Liu on 5/1/15.
// Copyright (c) 2015 Liu Liu. All rights reserved.
//
#import "SCManagedDeviceCapacityAnalyzerListener.h"
#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>
#import <Foundation/Foundation.h>
@class SCManagedCaptureDevice;
@protocol SCPerforming;
extern NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHigh;
@interface SCManagedDeviceCapacityAnalyzer : NSObject <SCManagedVideoDataSourceListener>
@property (nonatomic, assign) BOOL lowLightConditionEnabled;
- (instancetype)initWithPerformer:(id<SCPerforming>)performer;
- (void)addListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;
- (void)removeListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;
- (void)setAsFocusListenerForDevice:(SCManagedCaptureDevice *)captureDevice;
- (void)removeFocusListener;
@end
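// An illustrative wiring sketch, assuming the caller owns the performer, a listener, and
// the active capture device; SCExampleMakeCapacityAnalyzer is a hypothetical helper, not
// part of this header.
static inline SCManagedDeviceCapacityAnalyzer *
SCExampleMakeCapacityAnalyzer(id<SCPerforming> performer,
                              id<SCManagedDeviceCapacityAnalyzerListener> listener,
                              SCManagedCaptureDevice *device)
{
    SCManagedDeviceCapacityAnalyzer *analyzer =
        [[SCManagedDeviceCapacityAnalyzer alloc] initWithPerformer:performer];
    analyzer.lowLightConditionEnabled = YES; // opt in to low light detection
    [analyzer addListener:listener];
    [analyzer setAsFocusListenerForDevice:device]; // KVO on the device's adjustingFocus
    return analyzer;
}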

View File

@ -0,0 +1,294 @@
//
// SCManagedDeviceCapacityAnalyzer.m
// Snapchat
//
// Created by Liu Liu on 5/1/15.
// Copyright (c) 2015 Liu Liu. All rights reserved.
//
#import "SCManagedDeviceCapacityAnalyzer.h"
#import "SCCameraSettingUtils.h"
#import "SCCameraTweaks.h"
#import "SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h"
#import "SCManagedCaptureDevice.h"
#import "SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h"
#import <SCFoundation/SCAppEnvironment.h>
#import <SCFoundation/SCDeviceName.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCPerforming.h>
#import <SCFoundation/SCTrace.h>
#import <FBKVOController/FBKVOController.h>
@import ImageIO;
@import QuartzCore;
NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6WithHRSI = 500;
NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6S = 800;
NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor7 = 640;
NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor8 = 800;
// After this many frames without a change in exposure time or ISO, we assume that adjustingExposure has ended.
static NSInteger const kExposureUnchangedHighWatermark = 5;
// If the deadline is reached and we still haven't hit the high watermark, we consult the low watermark and at least
// give the system a chance to take not-so-great pictures.
static NSInteger const kExposureUnchangedLowWatermark = 1;
static NSTimeInterval const kExposureUnchangedDeadline = 0.2;
// It seems that between ISO 500 and 640, the brightness value is always somewhere around -0.4 to -0.5.
// Therefore, this threshold will probably work fine.
static float const kBrightnessValueThreshold = -2.25;
// Give some margin between being recognized as bright enough and as not having enough light.
// If the brightness is lower than kBrightnessValueThreshold - kBrightnessValueThresholdConfidenceInterval,
// we count the frame as a low light frame. Only if the brightness is higher than
// kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval do we consider that we
// have enough light and reset the low light frame count to 0. 0.5 was chosen because in a dark
// environment, the brightness value changes by +-0.3 with minor orientation changes.
static float const kBrightnessValueThresholdConfidenceInterval = 0.5;
// If we have been in a good light condition for this many consecutive frames, we are ready to change back
static NSInteger const kLowLightBoostUnchangedLowWatermark = 7;
// Requires that we stay in a low light condition for this many consecutive frames (~1 second at 20~30fps)
static NSInteger const kLowLightBoostUnchangedHighWatermark = 25;
static NSInteger const kSCLightingConditionDecisionWatermark = 15; // For 30 fps, it is 0.5 second
static float const kSCLightingConditionNormalThreshold = 0;
static float const kSCLightingConditionDarkThreshold = -3;
@implementation SCManagedDeviceCapacityAnalyzer {
float _lastExposureTime;
int _lastISOSpeedRating;
NSTimeInterval _lastAdjustingExposureStartTime;
NSInteger _lowLightBoostLowLightCount;
NSInteger _lowLightBoostEnoughLightCount;
NSInteger _exposureUnchangedCount;
NSInteger _maxISOPresetHigh;
NSInteger _normalLightingConditionCount;
NSInteger _darkLightingConditionCount;
NSInteger _extremeDarkLightingConditionCount;
SCCapturerLightingConditionType _lightingCondition;
BOOL _lowLightCondition;
BOOL _adjustingExposure;
SCManagedDeviceCapacityAnalyzerListenerAnnouncer *_announcer;
FBKVOController *_observeController;
id<SCPerforming> _performer;
float
        _lastBrightnessToLog; // Remember the last logged brightness; only log again if it changes by more than a threshold
}
- (instancetype)initWithPerformer:(id<SCPerforming>)performer
{
SCTraceStart();
self = [super init];
if (self) {
_performer = performer;
_maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6WithHRSI;
if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone8orNewer]) {
_maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor8;
} else if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone7orNewer]) {
_maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor7;
} else if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6SorNewer]) {
            // iPhone 6S supports a higher ISO rate for video recording; accommodating that.
_maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6S;
}
_announcer = [[SCManagedDeviceCapacityAnalyzerListenerAnnouncer alloc] init];
_observeController = [[FBKVOController alloc] initWithObserver:self];
}
return self;
}
- (void)addListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
SCTraceStart();
[_announcer addListener:listener];
}
- (void)removeListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
SCTraceStart();
[_announcer removeListener:listener];
}
- (void)setLowLightConditionEnabled:(BOOL)lowLightConditionEnabled
{
SCTraceStart();
if (_lowLightConditionEnabled != lowLightConditionEnabled) {
_lowLightConditionEnabled = lowLightConditionEnabled;
if (!lowLightConditionEnabled) {
_lowLightBoostLowLightCount = 0;
_lowLightBoostEnoughLightCount = 0;
_lowLightCondition = NO;
[_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition];
}
}
}
- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
SCTraceStart();
SampleBufferMetadata metadata = {
.isoSpeedRating = _lastISOSpeedRating, .brightness = 0, .exposureTime = _lastExposureTime,
};
retrieveSampleBufferMetadata(sampleBuffer, &metadata);
if ((SCIsDebugBuild() || SCIsMasterBuild())
// Enable this on internal build only (excluding alpha)
&& fabs(metadata.brightness - _lastBrightnessToLog) > 0.5f) {
// Log only when brightness change is greater than 0.5
_lastBrightnessToLog = metadata.brightness;
SCLogCoreCameraInfo(@"ExposureTime: %f, ISO: %ld, Brightness: %f", metadata.exposureTime,
(long)metadata.isoSpeedRating, metadata.brightness);
}
[self _automaticallyDetectAdjustingExposure:metadata.exposureTime ISOSpeedRating:metadata.isoSpeedRating];
_lastExposureTime = metadata.exposureTime;
_lastISOSpeedRating = metadata.isoSpeedRating;
    if (!_adjustingExposure && _lastISOSpeedRating <= _maxISOPresetHigh &&
        _lowLightConditionEnabled) { // Only check low light when exposure has settled and ISO is within the preset cap
[self _automaticallyDetectLowLightCondition:metadata.brightness];
}
[self _automaticallyDetectLightingConditionWithBrightness:metadata.brightness];
[_announcer managedDeviceCapacityAnalyzer:self didChangeBrightness:metadata.brightness];
}
- (void)setAsFocusListenerForDevice:(SCManagedCaptureDevice *)captureDevice
{
SCTraceStart();
[_observeController observe:captureDevice.device
keyPath:@keypath(captureDevice.device, adjustingFocus)
options:NSKeyValueObservingOptionNew
action:@selector(_adjustingFocusingChanged:)];
}
- (void)removeFocusListener
{
SCTraceStart();
[_observeController unobserveAll];
}
#pragma mark - Private methods
- (void)_automaticallyDetectAdjustingExposure:(float)currentExposureTime ISOSpeedRating:(NSInteger)currentISOSpeedRating
{
SCTraceStart();
if (currentISOSpeedRating != _lastISOSpeedRating || fabsf(currentExposureTime - _lastExposureTime) > FLT_MIN) {
_exposureUnchangedCount = 0;
} else {
++_exposureUnchangedCount;
}
NSTimeInterval currentTime = CACurrentMediaTime();
if (_exposureUnchangedCount >= kExposureUnchangedHighWatermark ||
(currentTime - _lastAdjustingExposureStartTime > kExposureUnchangedDeadline &&
_exposureUnchangedCount >= kExposureUnchangedLowWatermark)) {
        // The exposure values haven't changed for kExposureUnchangedHighWatermark frames, so we consider the adjustment
        // done. Otherwise, if we have waited long enough and the exposure-unchanged count has at least reached the low
        // watermark, we call it done and give it a shot.
if (_adjustingExposure) {
_adjustingExposure = NO;
SCLogGeneralInfo(@"Adjusting exposure is done, unchanged count: %zd", _exposureUnchangedCount);
[_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingExposure:_adjustingExposure];
}
} else {
// Otherwise signal that we have adjustments on exposure
if (!_adjustingExposure) {
_adjustingExposure = YES;
_lastAdjustingExposureStartTime = currentTime;
[_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingExposure:_adjustingExposure];
}
}
}
- (void)_automaticallyDetectLowLightCondition:(float)brightness
{
SCTraceStart();
if (!_lowLightCondition && _lastISOSpeedRating == _maxISOPresetHigh) {
        // If we are at the stage where we would need an even higher ISO (because the current ISO is maxed out)
        // and the brightness is lower than the threshold
        if (brightness < kBrightnessValueThreshold - kBrightnessValueThresholdConfidenceInterval) {
            // Count how many consecutive frames like this we have encountered;
            // once the count reaches the watermark, switch into low light boost mode
if (_lowLightBoostLowLightCount >= kLowLightBoostUnchangedHighWatermark) {
_lowLightCondition = YES;
[_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition];
} else {
++_lowLightBoostLowLightCount;
}
} else if (brightness >= kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval) {
// If the brightness is consistently better, reset the low light boost unchanged count to 0
_lowLightBoostLowLightCount = 0;
}
} else if (_lowLightCondition) {
// Check the current ISO to see if we can disable low light boost
if (_lastISOSpeedRating <= _maxISOPresetHigh &&
brightness >= kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval) {
if (_lowLightBoostEnoughLightCount >= kLowLightBoostUnchangedLowWatermark) {
_lowLightCondition = NO;
[_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition];
_lowLightBoostEnoughLightCount = 0;
} else {
++_lowLightBoostEnoughLightCount;
}
}
}
}
- (void)_adjustingFocusingChanged:(NSDictionary *)change
{
SCTraceStart();
BOOL adjustingFocus = [change[NSKeyValueChangeNewKey] boolValue];
[_performer perform:^{
[_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingFocus:adjustingFocus];
}];
}
- (void)_automaticallyDetectLightingConditionWithBrightness:(float)brightness
{
if (brightness >= kSCLightingConditionNormalThreshold) {
if (_normalLightingConditionCount > kSCLightingConditionDecisionWatermark) {
if (_lightingCondition != SCCapturerLightingConditionTypeNormal) {
_lightingCondition = SCCapturerLightingConditionTypeNormal;
[_announcer managedDeviceCapacityAnalyzer:self
didChangeLightingCondition:SCCapturerLightingConditionTypeNormal];
}
} else {
_normalLightingConditionCount++;
}
_darkLightingConditionCount = 0;
_extremeDarkLightingConditionCount = 0;
} else if (brightness >= kSCLightingConditionDarkThreshold) {
if (_darkLightingConditionCount > kSCLightingConditionDecisionWatermark) {
if (_lightingCondition != SCCapturerLightingConditionTypeDark) {
_lightingCondition = SCCapturerLightingConditionTypeDark;
[_announcer managedDeviceCapacityAnalyzer:self
didChangeLightingCondition:SCCapturerLightingConditionTypeDark];
}
} else {
_darkLightingConditionCount++;
}
_normalLightingConditionCount = 0;
_extremeDarkLightingConditionCount = 0;
} else {
if (_extremeDarkLightingConditionCount > kSCLightingConditionDecisionWatermark) {
if (_lightingCondition != SCCapturerLightingConditionTypeExtremeDark) {
_lightingCondition = SCCapturerLightingConditionTypeExtremeDark;
[_announcer managedDeviceCapacityAnalyzer:self
didChangeLightingCondition:SCCapturerLightingConditionTypeExtremeDark];
}
} else {
_extremeDarkLightingConditionCount++;
}
_normalLightingConditionCount = 0;
_darkLightingConditionCount = 0;
}
}
@end

View File

@ -0,0 +1,20 @@
//
// SCManagedDeviceCapacityAnalyzerHandler.h
// Snapchat
//
// Created by Jingtian Yang on 11/12/2017.
//
#import "SCManagedDeviceCapacityAnalyzerListener.h"
#import <Foundation/Foundation.h>
@class SCCaptureResource;
@interface SCManagedDeviceCapacityAnalyzerHandler : NSObject <SCManagedDeviceCapacityAnalyzerListener>
- (instancetype)init NS_UNAVAILABLE;
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;
@end

View File

@ -0,0 +1,72 @@
//
// SCManagedDeviceCapacityAnalyzerHandler.m
// Snapchat
//
// Created by Jingtian Yang on 11/12/2017.
//
#import "SCManagedDeviceCapacityAnalyzerHandler.h"
#import "SCCaptureResource.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerState.h"
#import "SCManagedCapturerStateBuilder.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/SCTraceODPCompatible.h>
@interface SCManagedDeviceCapacityAnalyzerHandler () {
__weak SCCaptureResource *_captureResource;
}
@end
@implementation SCManagedDeviceCapacityAnalyzerHandler
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
self = [super init];
if (self) {
SCAssert(captureResource, @"");
_captureResource = captureResource;
}
return self;
}
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
didChangeLowLightCondition:(BOOL)lowLightCondition
{
SCTraceODPCompatibleStart(2);
SCLogCapturerInfo(@"Change Low Light Condition %d", lowLightCondition);
[_captureResource.queuePerformer perform:^{
_captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
setLowLightCondition:lowLightCondition] build];
SCManagedCapturerState *state = [_captureResource.state copy];
runOnMainThreadAsynchronously(^{
[_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];
[_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
didChangeLowLightCondition:state];
});
}];
}
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
didChangeAdjustingExposure:(BOOL)adjustingExposure
{
SCTraceODPCompatibleStart(2);
SCLogCapturerInfo(@"Capacity Analyzer Changes adjustExposure %d", adjustingExposure);
[_captureResource.queuePerformer perform:^{
_captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
setAdjustingExposure:adjustingExposure] build];
SCManagedCapturerState *state = [_captureResource.state copy];
runOnMainThreadAsynchronously(^{
[_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];
[_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
didChangeAdjustingExposure:state];
});
}];
}
@end

View File

@ -0,0 +1,35 @@
//#!announcer.rb
// SCManagedDeviceCapacityAnalyzerListener.h
// Snapchat
//
// Created by Liu Liu on 5/4/15.
// Copyright (c) 2015 Liu Liu. All rights reserved.
//
#import "SCCapturerDefines.h"
#import <Foundation/Foundation.h>
@class SCManagedDeviceCapacityAnalyzer;
@protocol SCManagedDeviceCapacityAnalyzerListener <NSObject>
@optional
// These callbacks happen on an internal queue
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
didChangeLowLightCondition:(BOOL)lowLightCondition;
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
didChangeAdjustingExposure:(BOOL)adjustingExposure;
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
didChangeAdjustingFocus:(BOOL)adjustingFocus;
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
didChangeBrightness:(float)adjustingBrightness;
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition;
@end

View File

@ -0,0 +1,12 @@
// Generated by the announcer.rb DO NOT EDIT!!
#import "SCManagedDeviceCapacityAnalyzerListener.h"
#import <Foundation/Foundation.h>
@interface SCManagedDeviceCapacityAnalyzerListenerAnnouncer : NSObject <SCManagedDeviceCapacityAnalyzerListener>
- (void)addListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;
- (void)removeListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;
@end
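// An illustrative sketch of the fan-out: the announcer itself conforms to the listener
// protocol and forwards each callback to every registered listener (see the generated
// implementation below). SCExampleAnnounceLowLight is a hypothetical helper.
static inline void SCExampleAnnounceLowLight(SCManagedDeviceCapacityAnalyzerListenerAnnouncer *announcer,
                                             SCManagedDeviceCapacityAnalyzer *analyzer,
                                             id<SCManagedDeviceCapacityAnalyzerListener> listener)
{
    [announcer addListener:listener];
    [announcer managedDeviceCapacityAnalyzer:analyzer didChangeLowLightCondition:YES];
}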

View File

@ -0,0 +1,146 @@
// Generated by the announcer.rb DO NOT EDIT!!
#import "SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h"
#include <mutex>
using std::lock_guard;
using std::mutex;
#include <vector>
using std::find;
using std::make_shared;
using std::shared_ptr;
using std::vector;
@implementation SCManagedDeviceCapacityAnalyzerListenerAnnouncer {
mutex _mutex;
shared_ptr<vector<__weak id<SCManagedDeviceCapacityAnalyzerListener>>> _listeners;
}
- (NSString *)description
{
auto listeners = atomic_load(&self->_listeners);
NSMutableString *desc = [NSMutableString string];
[desc appendFormat:@"<SCManagedDeviceCapacityAnalyzerListenerAnnouncer %p>: [", self];
for (int i = 0; i < listeners->size(); ++i) {
[desc appendFormat:@"%@", (*listeners)[i]];
if (i != listeners->size() - 1) {
[desc appendString:@", "];
}
}
[desc appendString:@"]"];
return desc;
}
- (void)addListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
lock_guard<mutex> lock(_mutex);
auto listeners = make_shared<vector<__weak id<SCManagedDeviceCapacityAnalyzerListener>>>();
if (_listeners != nil) {
// The listener we want to add already exists
if (find(_listeners->begin(), _listeners->end(), listener) != _listeners->end()) {
return;
}
for (auto &one : *_listeners) {
if (one != nil) {
listeners->push_back(one);
}
}
listeners->push_back(listener);
atomic_store(&self->_listeners, listeners);
} else {
listeners->push_back(listener);
atomic_store(&self->_listeners, listeners);
}
}
- (void)removeListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
lock_guard<mutex> lock(_mutex);
if (_listeners == nil) {
return;
}
// If the only item in the listener list is the one we want to remove, store it back to nil again
if (_listeners->size() == 1 && (*_listeners)[0] == listener) {
atomic_store(&self->_listeners, shared_ptr<vector<__weak id<SCManagedDeviceCapacityAnalyzerListener>>>());
return;
}
auto listeners = make_shared<vector<__weak id<SCManagedDeviceCapacityAnalyzerListener>>>();
for (auto &one : *_listeners) {
if (one != nil && one != listener) {
listeners->push_back(one);
}
}
atomic_store(&self->_listeners, listeners);
}
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
didChangeLowLightCondition:(BOOL)lowLightCondition
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedDeviceCapacityAnalyzerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeLowLightCondition:)]) {
[listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer
didChangeLowLightCondition:lowLightCondition];
}
}
}
}
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
didChangeAdjustingExposure:(BOOL)adjustingExposure
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedDeviceCapacityAnalyzerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeAdjustingExposure:)]) {
[listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer
didChangeAdjustingExposure:adjustingExposure];
}
}
}
}
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
didChangeAdjustingFocus:(BOOL)adjustingFocus
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedDeviceCapacityAnalyzerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeAdjustingFocus:)]) {
[listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer
didChangeAdjustingFocus:adjustingFocus];
}
}
}
}
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
didChangeBrightness:(float)adjustingBrightness
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedDeviceCapacityAnalyzerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeBrightness:)]) {
[listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer
didChangeBrightness:adjustingBrightness];
}
}
}
}
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition
{
auto listeners = atomic_load(&self->_listeners);
if (listeners) {
for (id<SCManagedDeviceCapacityAnalyzerListener> listener : *listeners) {
if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeLightingCondition:)]) {
[listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer
didChangeLightingCondition:lightingCondition];
}
}
}
}
@end

View File

@ -0,0 +1,25 @@
//
// SCManagedDroppedFramesReporter.h
// Snapchat
//
// Created by Michel Loenngren on 3/21/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedCapturerListener.h"
#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>
#import <Foundation/Foundation.h>
/*
Conforms to SCManagedVideoDataSourceListener and records frame rate statistics
during recording.
*/
@interface SCManagedDroppedFramesReporter : NSObject <SCManagedVideoDataSourceListener, SCManagedCapturerListener>
- (void)reportWithKeepLateFrames:(BOOL)keepLateFrames lensesApplied:(BOOL)lensesApplied;
- (void)didChangeCaptureDevicePosition;
@end
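// An illustrative sketch, assuming a hypothetical recording flow that flushes the
// collected statistics once a recording session ends; SCExampleFlushDroppedFrames is not
// part of the reporter's API.
static inline void SCExampleFlushDroppedFrames(SCManagedDroppedFramesReporter *reporter, BOOL lensesApplied)
{
    // Logs the accumulated frame-drop counters and resets the reporter's internal state.
    [reporter reportWithKeepLateFrames:YES lensesApplied:lensesApplied];
}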

View File

@ -0,0 +1,86 @@
//
// SCManagedDroppedFramesReporter.m
// Snapchat
//
// Created by Michel Loenngren on 3/21/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCManagedDroppedFramesReporter.h"
#import "SCCameraTweaks.h"
#import "SCManagedCapturerState.h"
#import <SCFoundation/SCBackgroundTaskMonitor.h>
#import <SCFoundation/SCLog.h>
#import <SCFrameRate/SCFrameRateEntry.h>
#import <SCFrameRate/SCVideoFrameDropCounter.h>
#import <SCLogger/SCCameraMetrics.h>
#import <SCLogger/SCLogger.h>
CGFloat const kSCCaptureTargetFramerate = 30;
@interface SCManagedDroppedFramesReporter ()
@property (nonatomic) SCVideoFrameDropCounter *frameDropCounter;
@end
@implementation SCManagedDroppedFramesReporter {
SCVideoFrameDropCounter *_frameDropCounter;
NSUInteger _droppedFrames;
}
- (SCVideoFrameDropCounter *)frameDropCounter
{
if (_frameDropCounter == nil) {
_frameDropCounter = [[SCVideoFrameDropCounter alloc] initWithTargetFramerate:kSCCaptureTargetFramerate];
_droppedFrames = 0;
}
return _frameDropCounter;
}
- (void)reportWithKeepLateFrames:(BOOL)keepLateFrames lensesApplied:(BOOL)lensesApplied
{
if (_frameDropCounter == nil) {
return;
}
NSMutableDictionary *eventDict = [_frameDropCounter.toDict mutableCopy];
eventDict[@"total_frame_drop_measured"] = @(_droppedFrames);
eventDict[@"keep_late_frames"] = @(keepLateFrames);
    // If the user selects none of the lenses when activating the lenses scroll view, we still enable keepLateFrames
eventDict[@"lenses_applied"] = @(lensesApplied);
[[SCLogger sharedInstance] logEvent:kSCCameraMetricsFramesDroppedDuringRecording parameters:eventDict];
// Reset
_frameDropCounter = nil;
_droppedFrames = 0;
}
- (void)didChangeCaptureDevicePosition
{
[_frameDropCounter didChangeCaptureDevicePosition];
}
#pragma mark - SCManagedVideoDataSourceListener
- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
[self.frameDropCounter processFrameTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
}
- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
_droppedFrames += 1;
NSDictionary<NSString *, NSNumber *> *backgroundTaskScreenshot = SCBackgrounTaskScreenshotReport();
SCLogCoreCameraInfo(@"[SCManagedDroppedFramesReporter] frame dropped, background tasks: %@",
backgroundTaskScreenshot);
}
@end

View File

@ -0,0 +1,57 @@
//
// SCManagedFrameHealthChecker.h
// Snapchat
//
// Created by Pinlin Chen on 30/08/2017.
//
#import <SCBase/SCMacros.h>
#import <SCFeatureGating/SCExperimentManager.h>
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
@interface SCManagedFrameHealthChecker : NSObject
+ (SCManagedFrameHealthChecker *)sharedInstance;
/*! @abstract Use sharedInstance instead. */
SC_INIT_AND_NEW_UNAVAILABLE;
/* Utility method */
- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer extraInfo:(NSDictionary *)extraInfo;
- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer
photoCapturerEnabled:(BOOL)photoCapturerEnabled
lensEnabled:(BOOL)lensesEnabled
lensID:(NSString *)lensID;
- (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata
photoCapturerEnabled:(BOOL)photoCapturerEnabled
lensEnabled:(BOOL)lensesEnabled
lensID:(NSString *)lensID;
- (NSMutableDictionary *)getPropertiesFromAsset:(AVAsset *)asset;
/* Image snap */
- (void)checkImageHealthForCaptureFrameImage:(UIImage *)image
captureSettings:(NSDictionary *)captureSettings
captureSessionID:(NSString *)captureSessionID;
- (void)checkImageHealthForPreTranscoding:(UIImage *)image
metadata:(NSDictionary *)metadata
captureSessionID:(NSString *)captureSessionID;
- (void)checkImageHealthForPostTranscoding:(NSData *)imageData
metadata:(NSDictionary *)metadata
captureSessionID:(NSString *)captureSessionID;
/* Video snap */
- (void)checkVideoHealthForCaptureFrameImage:(UIImage *)image
metedata:(NSDictionary *)metadata
captureSessionID:(NSString *)captureSessionID;
- (void)checkVideoHealthForOverlayImage:(UIImage *)image
metedata:(NSDictionary *)metadata
captureSessionID:(NSString *)captureSessionID;
- (void)checkVideoHealthForPostTranscodingThumbnail:(UIImage *)image
metedata:(NSDictionary *)metadata
properties:(NSDictionary *)properties
captureSessionID:(NSString *)captureSessionID;
- (void)reportFrameHealthCheckForCaptureSessionID:(NSString *)captureSessionID;
@end
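// An illustrative sketch with hypothetical `capturedImage`, `settings`, and `sessionID`
// values: checks are queued per captureSessionID, and the (relatively expensive) analysis
// only runs once the report call is made. SCExampleCheckCapturedImage is not part of this header.
static inline void SCExampleCheckCapturedImage(UIImage *capturedImage, NSDictionary *settings, NSString *sessionID)
{
    SCManagedFrameHealthChecker *checker = [SCManagedFrameHealthChecker sharedInstance];
    [checker checkImageHealthForCaptureFrameImage:capturedImage
                                  captureSettings:settings
                                 captureSessionID:sessionID];
    // In practice this happens later, once the snap is finalized; it triggers the analysis and logging.
    [checker reportFrameHealthCheckForCaptureSessionID:sessionID];
}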

View File

@ -0,0 +1,709 @@
//
// SCManagedFrameHealthChecker.m
// Snapchat
//
// Created by Pinlin Chen on 30/08/2017.
//
#import "SCManagedFrameHealthChecker.h"
#import "SCCameraSettingUtils.h"
#import "SCCameraTweaks.h"
#import <SCFoundation/AVAsset+Helpers.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCLogHelper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTraceODPCompatible.h>
#import <SCFoundation/UIImage+Helpers.h>
#import <SCLogger/SCCameraMetrics.h>
#import <SCLogger/SCLogger+Stats.h>
#import <SCWebP/UIImage+WebP.h>
#import <ImageIO/CGImageProperties.h>
@import Accelerate;
static const char *kSCManagedFrameHealthCheckerQueueLabel = "com.snapchat.frame_health_checker";
static const int kSCManagedFrameHealthCheckerMaxSamples = 2304;
static const float kSCManagedFrameHealthCheckerPossibleBlackThreshold = 20.0;
static const float kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength = 300.0;
static const float kSCManagedFrameHealthCheckerScaledImageScale = 1.0;
// Assume we can process at most 2 RGBA images of 2304*4096 each
static const double kSCManagedFrameHealthCheckerMinFreeMemMB = 72.0;
typedef NS_ENUM(NSUInteger, SCManagedFrameHealthCheckType) {
SCManagedFrameHealthCheck_ImageCapture = 0,
SCManagedFrameHealthCheck_ImagePreTranscoding,
SCManagedFrameHealthCheck_ImagePostTranscoding,
SCManagedFrameHealthCheck_VideoCapture,
SCManagedFrameHealthCheck_VideoOverlayImage,
SCManagedFrameHealthCheck_VideoPostTranscoding,
};
typedef NS_ENUM(NSUInteger, SCManagedFrameHealthCheckErrorType) {
SCManagedFrameHealthCheckError_None = 0,
SCManagedFrameHealthCheckError_Invalid_Bitmap,
SCManagedFrameHealthCheckError_Frame_Possibly_Black,
SCManagedFrameHealthCheckError_Frame_Totally_Black,
SCManagedFrameHealthCheckError_Execution_Error,
};
typedef struct {
float R;
float G;
float B;
float A;
} FloatRGBA;
@class SCManagedFrameHealthCheckerTask;
typedef NSMutableDictionary * (^sc_managed_frame_checker_block)(SCManagedFrameHealthCheckerTask *task);
float vDspColorElementSum(const Byte *data, NSInteger stripLength, NSInteger bufferLength)
{
float sum = 0;
float colorArray[bufferLength];
    // Convert the bytes to floats for the vDSP routines
vDSP_vfltu8(data, stripLength, colorArray, 1, bufferLength);
    // Calculate the sum of the color elements
vDSP_sve(colorArray, 1, &sum, bufferLength);
return sum;
}
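// A usage sketch of the helper above, assuming a tightly packed RGBA bitmap; the checker's
// own sampling presumably happens inside _getFrameHealthInfoForImage below. Dividing a
// channel's sum by the sample count yields an average that can be compared against
// kSCManagedFrameHealthCheckerPossibleBlackThreshold. SCExampleIsChannelPossiblyBlack is a
// hypothetical helper, not part of this file.
static inline BOOL SCExampleIsChannelPossiblyBlack(const Byte *rgbaData, NSInteger sampleCount)
{
    // A stride of 4 walks a single channel (e.g. R) across consecutive RGBA pixels.
    float sum = vDspColorElementSum(rgbaData, 4, sampleCount);
    return (sum / sampleCount) < kSCManagedFrameHealthCheckerPossibleBlackThreshold;
}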
@interface SCManagedFrameHealthCheckerTask : NSObject
@property (nonatomic, assign) SCManagedFrameHealthCheckType type;
@property (nonatomic, strong) id targetObject;
@property (nonatomic, assign) CGSize sourceImageSize;
@property (nonatomic, strong) UIImage *unifiedImage;
@property (nonatomic, strong) NSDictionary *metadata;
@property (nonatomic, strong) NSDictionary *videoProperties;
@property (nonatomic, assign) SCManagedFrameHealthCheckErrorType errorType;
+ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type
targetObject:(id)targetObject
metadata:(NSDictionary *)metadata
videoProperties:(NSDictionary *)videoProperties;
+ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type
targetObject:(id)targetObject
metadata:(NSDictionary *)metadata;
@end
@implementation SCManagedFrameHealthCheckerTask
+ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type
targetObject:(id)targetObject
metadata:(NSDictionary *)metadata
{
return [self taskWithType:type targetObject:targetObject metadata:metadata videoProperties:nil];
}
+ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type
targetObject:(id)targetObject
metadata:(NSDictionary *)metadata
videoProperties:(NSDictionary *)videoProperties
{
SCManagedFrameHealthCheckerTask *task = [[SCManagedFrameHealthCheckerTask alloc] init];
task.type = type;
task.targetObject = targetObject;
task.metadata = metadata;
task.videoProperties = videoProperties;
return task;
}
- (NSString *)textForSnapType
{
switch (self.type) {
case SCManagedFrameHealthCheck_ImageCapture:
case SCManagedFrameHealthCheck_ImagePreTranscoding:
case SCManagedFrameHealthCheck_ImagePostTranscoding:
return @"IMAGE";
case SCManagedFrameHealthCheck_VideoCapture:
case SCManagedFrameHealthCheck_VideoOverlayImage:
case SCManagedFrameHealthCheck_VideoPostTranscoding:
return @"VIDEO";
}
}
- (NSString *)textForSource
{
switch (self.type) {
case SCManagedFrameHealthCheck_ImageCapture:
return @"CAPTURE";
case SCManagedFrameHealthCheck_ImagePreTranscoding:
return @"PRE_TRANSCODING";
case SCManagedFrameHealthCheck_ImagePostTranscoding:
return @"POST_TRANSCODING";
case SCManagedFrameHealthCheck_VideoCapture:
return @"CAPTURE";
case SCManagedFrameHealthCheck_VideoOverlayImage:
return @"OVERLAY_IMAGE";
case SCManagedFrameHealthCheck_VideoPostTranscoding:
return @"POST_TRANSCODING";
}
}
- (NSString *)textForErrorType
{
switch (self.errorType) {
case SCManagedFrameHealthCheckError_None:
return nil;
case SCManagedFrameHealthCheckError_Invalid_Bitmap:
return @"Invalid_Bitmap";
case SCManagedFrameHealthCheckError_Frame_Possibly_Black:
return @"Frame_Possibly_Black";
case SCManagedFrameHealthCheckError_Frame_Totally_Black:
return @"Frame_Totally_Black";
case SCManagedFrameHealthCheckError_Execution_Error:
return @"Execution_Error";
}
}
@end
@interface SCManagedFrameHealthChecker () {
id<SCPerforming> _performer;
// Dictionary structure
// Key - NSString, captureSessionID
// Value - NSMutableArray<SCManagedFrameHealthCheckerTask>
NSMutableDictionary *_frameCheckTasks;
}
@end
@implementation SCManagedFrameHealthChecker
+ (SCManagedFrameHealthChecker *)sharedInstance
{
SCTraceODPCompatibleStart(2);
static SCManagedFrameHealthChecker *checker;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
checker = [[SCManagedFrameHealthChecker alloc] _init];
});
return checker;
}
- (instancetype)_init
{
SCTraceODPCompatibleStart(2);
if (self = [super init]) {
// Use the lowest QoS level
_performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedFrameHealthCheckerQueueLabel
qualityOfService:QOS_CLASS_UTILITY
queueType:DISPATCH_QUEUE_SERIAL
context:SCQueuePerformerContextCamera];
_frameCheckTasks = [NSMutableDictionary dictionary];
}
return self;
}
- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
SCTraceODPCompatibleStart(2);
// add exposure, ISO, brightness
NSMutableDictionary *metadata = [NSMutableDictionary dictionary];
if (!sampleBuffer || !CMSampleBufferDataIsReady(sampleBuffer)) {
return metadata;
}
CFDictionaryRef exifAttachments =
(CFDictionaryRef)CMGetAttachment(sampleBuffer, kCGImagePropertyExifDictionary, NULL);
NSNumber *exposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments);
if (exposureTimeNum) {
metadata[@"exposure"] = exposureTimeNum;
}
NSNumber *isoSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments);
if (isoSpeedRatingNum) {
metadata[@"iso"] = isoSpeedRatingNum;
}
NSNumber *brightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments);
if (brightnessNum) {
float brightness = [brightnessNum floatValue];
metadata[@"brightness"] = isfinite(brightness) ? @(brightness) : @(0);
}
return metadata;
}
- (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata
{
SCTraceODPCompatibleStart(2);
// add exposure, ISO, brightness
NSMutableDictionary *newMetadata = [NSMutableDictionary dictionary];
CFDictionaryRef exifAttachments = (__bridge CFDictionaryRef)metadata;
NSNumber *exposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments);
if (exposureTimeNum) {
newMetadata[@"exposure"] = exposureTimeNum;
}
NSNumber *isoSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments);
if (isoSpeedRatingNum) {
newMetadata[@"iso"] = isoSpeedRatingNum;
}
NSNumber *brightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments);
if (brightnessNum) {
float brightness = [brightnessNum floatValue];
newMetadata[@"brightness"] = isfinite(brightness) ? @(brightness) : @(0);
}
return newMetadata;
}
- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer extraInfo:(NSDictionary *)extraInfo
{
SCTraceODPCompatibleStart(2);
NSMutableDictionary *metadata = [self metadataForSampleBuffer:sampleBuffer];
[metadata addEntriesFromDictionary:extraInfo];
return metadata;
}
- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer
photoCapturerEnabled:(BOOL)photoCapturerEnabled
lensEnabled:(BOOL)lensesEnabled
lensID:(NSString *)lensID
{
SCTraceODPCompatibleStart(2);
NSMutableDictionary *metadata = [self metadataForSampleBuffer:sampleBuffer];
metadata[@"photo_capturer_enabled"] = @(photoCapturerEnabled);
metadata[@"lens_enabled"] = @(lensesEnabled);
if (lensesEnabled) {
metadata[@"lens_id"] = lensID ?: @"";
}
return metadata;
}
- (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata
photoCapturerEnabled:(BOOL)photoCapturerEnabled
lensEnabled:(BOOL)lensesEnabled
lensID:(NSString *)lensID
{
SCTraceODPCompatibleStart(2);
NSMutableDictionary *newMetadata = [self metadataForMetadata:metadata];
newMetadata[@"photo_capturer_enabled"] = @(photoCapturerEnabled);
newMetadata[@"lens_enabled"] = @(lensesEnabled);
if (lensesEnabled) {
newMetadata[@"lens_id"] = lensID ?: @"";
}
return newMetadata;
}
- (NSMutableDictionary *)getPropertiesFromAsset:(AVAsset *)asset
{
SCTraceODPCompatibleStart(2);
SC_GUARD_ELSE_RETURN_VALUE(asset != nil, nil);
NSMutableDictionary *properties = [NSMutableDictionary dictionary];
// file size
properties[@"file_size"] = @([asset fileSize]);
// duration
properties[@"duration"] = @(CMTimeGetSeconds(asset.duration));
// video track count
NSArray<AVAssetTrack *> *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
properties[@"video_track_count"] = @(videoTracks.count);
if (videoTracks.count > 0) {
// video bitrate
properties[@"video_bitrate"] = @([videoTracks.firstObject estimatedDataRate]);
// frame rate
properties[@"video_frame_rate"] = @([videoTracks.firstObject nominalFrameRate]);
}
// audio track count
NSArray<AVAssetTrack *> *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
properties[@"audio_track_count"] = @(audioTracks.count);
if (audioTracks.count > 0) {
// audio bitrate
properties[@"audio_bitrate"] = @([audioTracks.firstObject estimatedDataRate]);
}
// playable
properties[@"playable"] = @(asset.isPlayable);
return properties;
}
#pragma mark - Image snap
- (void)checkImageHealthForCaptureFrameImage:(UIImage *)image
captureSettings:(NSDictionary *)captureSettings
captureSessionID:(NSString *)captureSessionID
{
SCTraceODPCompatibleStart(2);
if (captureSessionID.length == 0) {
SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:CAPTURE - captureSessionID shouldn't be empty");
return;
}
SCManagedFrameHealthCheckerTask *task =
[SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImageCapture
targetObject:image
metadata:captureSettings];
[self _addTask:task withCaptureSessionID:captureSessionID];
}
- (void)checkImageHealthForPreTranscoding:(UIImage *)image
metadata:(NSDictionary *)metadata
captureSessionID:(NSString *)captureSessionID
{
SCTraceODPCompatibleStart(2);
if (captureSessionID.length == 0) {
SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:PRE_CAPTURE - captureSessionID shouldn't be empty");
return;
}
SCManagedFrameHealthCheckerTask *task =
[SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImagePreTranscoding
targetObject:image
metadata:metadata];
[self _addTask:task withCaptureSessionID:captureSessionID];
}
- (void)checkImageHealthForPostTranscoding:(NSData *)imageData
metadata:(NSDictionary *)metadata
captureSessionID:(NSString *)captureSessionID
{
SCTraceODPCompatibleStart(2);
if (captureSessionID.length == 0) {
SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:POST_CAPTURE - captureSessionID shouldn't be empty");
return;
}
SCManagedFrameHealthCheckerTask *task =
[SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImagePostTranscoding
targetObject:imageData
metadata:metadata];
[self _addTask:task withCaptureSessionID:captureSessionID];
}
#pragma mark - Video snap
- (void)checkVideoHealthForCaptureFrameImage:(UIImage *)image
metedata:(NSDictionary *)metadata
captureSessionID:(NSString *)captureSessionID
{
SCTraceODPCompatibleStart(2);
if (captureSessionID.length == 0) {
SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:CAPTURE - captureSessionID shouldn't be empty");
return;
}
SCManagedFrameHealthCheckerTask *task =
[SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoCapture
targetObject:image
metadata:metadata];
[self _addTask:task withCaptureSessionID:captureSessionID];
}
- (void)checkVideoHealthForOverlayImage:(UIImage *)image
metedata:(NSDictionary *)metadata
captureSessionID:(NSString *)captureSessionID
{
SCTraceODPCompatibleStart(2);
if (captureSessionID.length == 0) {
SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:OVERLAY_IMAGE - captureSessionID shouldn't be empty");
return;
}
// Overlay image could be nil
if (!image) {
SCLogCoreCameraInfo(@"[FrameHealthChecker] #VIDEO:OVERLAY_IMAGE - overlayImage is nil.");
return;
}
SCManagedFrameHealthCheckerTask *task =
[SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoOverlayImage
targetObject:image
metadata:metadata];
[self _addTask:task withCaptureSessionID:captureSessionID];
}
- (void)checkVideoHealthForPostTranscodingThumbnail:(UIImage *)image
metedata:(NSDictionary *)metadata
properties:(NSDictionary *)properties
captureSessionID:(NSString *)captureSessionID
{
SCTraceODPCompatibleStart(2);
if (captureSessionID.length == 0) {
SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:POST_TRANSCODING - captureSessionID shouldn't be empty");
return;
}
SCManagedFrameHealthCheckerTask *task =
[SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoPostTranscoding
targetObject:image
metadata:metadata
videoProperties:properties];
[self _addTask:task withCaptureSessionID:captureSessionID];
}
#pragma mark - Task management
- (void)reportFrameHealthCheckForCaptureSessionID:(NSString *)captureSessionID
{
SCTraceODPCompatibleStart(2);
if (!captureSessionID) {
SCLogCoreCameraError(@"[FrameHealthChecker] report - captureSessionID shouldn't be nil");
return;
}
[self _asynchronouslyCheckForCaptureSessionID:captureSessionID];
}
#pragma mark - Private functions
/// Scale the source image to a new image with edges less than kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength.
- (UIImage *)_unifyImage:(UIImage *)sourceImage
{
CGFloat sourceWidth = sourceImage.size.width;
CGFloat sourceHeight = sourceImage.size.height;
if (sourceWidth == 0.0 || sourceHeight == 0.0) {
SCLogCoreCameraInfo(@"[FrameHealthChecker] Tried scaling image with no size");
return sourceImage;
}
CGFloat maxEdgeLength = kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength;
CGFloat widthScalingFactor = maxEdgeLength / sourceWidth;
CGFloat heightScalingFactor = maxEdgeLength / sourceHeight;
CGFloat scalingFactor = MIN(widthScalingFactor, heightScalingFactor);
if (scalingFactor >= 1) {
SCLogCoreCameraInfo(@"[FrameHealthChecker] No need to scale image.");
return sourceImage;
}
CGSize targetSize = CGSizeMake(sourceWidth * scalingFactor, sourceHeight * scalingFactor);
SCLogCoreCameraInfo(@"[FrameHealthChecker] Scaling image from %@ to %@", NSStringFromCGSize(sourceImage.size),
NSStringFromCGSize(targetSize));
return [sourceImage scaledImageToSize:targetSize scale:kSCManagedFrameHealthCheckerScaledImageScale];
}
- (void)_addTask:(SCManagedFrameHealthCheckerTask *)newTask withCaptureSessionID:(NSString *)captureSessionID
{
SCTraceODPCompatibleStart(2);
if (captureSessionID.length == 0) {
return;
}
[_performer perform:^{
SCTraceODPCompatibleStart(2);
CFTimeInterval beforeScaling = CACurrentMediaTime();
if (newTask.targetObject) {
if ([newTask.targetObject isKindOfClass:[UIImage class]]) {
UIImage *sourceImage = (UIImage *)newTask.targetObject;
newTask.unifiedImage = [self _unifyImage:sourceImage];
newTask.sourceImageSize = sourceImage.size;
} else if ([newTask.targetObject isKindOfClass:[NSData class]]) {
UIImage *sourceImage = [UIImage sc_imageWithData:newTask.targetObject];
CFTimeInterval betweenDecodingAndScaling = CACurrentMediaTime();
SCLogCoreCameraInfo(@"[FrameHealthChecker] #Image decoding delay: %f",
betweenDecodingAndScaling - beforeScaling);
beforeScaling = betweenDecodingAndScaling;
newTask.unifiedImage = [self _unifyImage:sourceImage];
newTask.sourceImageSize = sourceImage.size;
} else {
SCLogCoreCameraError(@"[FrameHealthChecker] Invalid targetObject class:%@",
NSStringFromClass([newTask.targetObject class]));
}
newTask.targetObject = nil;
}
SCLogCoreCameraInfo(@"[FrameHealthChecker] #Scale image delay: %f", CACurrentMediaTime() - beforeScaling);
NSMutableArray *taskQueue = _frameCheckTasks[captureSessionID];
if (!taskQueue) {
taskQueue = [NSMutableArray array];
_frameCheckTasks[captureSessionID] = taskQueue;
}
        // Remove any previous task of the same type to avoid meaningless work;
        // for example, repeatedly tapping the "Send" button and then the "Back" button
        // would otherwise produce a lot of PRE_TRANSCODING and POST_TRANSCODING tasks
for (SCManagedFrameHealthCheckerTask *task in taskQueue) {
if (task.type == newTask.type) {
[taskQueue removeObject:task];
break;
}
}
[taskQueue addObject:newTask];
}];
}
- (void)_asynchronouslyCheckForCaptureSessionID:(NSString *)captureSessionID
{
SCTraceODPCompatibleStart(2);
[_performer perform:^{
SCTraceODPCompatibleStart(2);
NSMutableArray *tasksQueue = _frameCheckTasks[captureSessionID];
if (!tasksQueue) {
return;
}
        // Check the free memory; if it is too low, drop these tasks
double memFree = [SCLogger memoryFreeMB];
if (memFree < kSCManagedFrameHealthCheckerMinFreeMemMB) {
SCLogCoreCameraWarning(
@"[FrameHealthChecker] mem_free:%f is too low, dropped checking tasks for captureSessionID:%@", memFree,
captureSessionID);
[_frameCheckTasks removeObjectForKey:captureSessionID];
return;
}
__block NSMutableArray *frameHealthInfoArray = [NSMutableArray array];
        // Execute all tasks and wait for completion
[tasksQueue enumerateObjectsUsingBlock:^(id _Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) {
SCManagedFrameHealthCheckerTask *task = (SCManagedFrameHealthCheckerTask *)obj;
NSMutableDictionary *frameHealthInfo;
UIImage *image = task.unifiedImage;
if (image) {
// Get frame health info
frameHealthInfo = [self _getFrameHealthInfoForImage:image
source:[task textForSource]
snapType:[task textForSnapType]
metadata:task.metadata
sourceImageSize:task.sourceImageSize
captureSessionID:captureSessionID];
NSNumber *isPossibleBlackNum = frameHealthInfo[@"is_possible_black"];
NSNumber *isTotallyBlackNum = frameHealthInfo[@"is_total_black"];
NSNumber *hasExecutionError = frameHealthInfo[@"execution_error"];
if ([isTotallyBlackNum boolValue]) {
task.errorType = SCManagedFrameHealthCheckError_Frame_Totally_Black;
} else if ([isPossibleBlackNum boolValue]) {
task.errorType = SCManagedFrameHealthCheckError_Frame_Possibly_Black;
} else if ([hasExecutionError boolValue]) {
task.errorType = SCManagedFrameHealthCheckError_Execution_Error;
}
} else {
frameHealthInfo = [NSMutableDictionary dictionary];
task.errorType = SCManagedFrameHealthCheckError_Invalid_Bitmap;
}
if (frameHealthInfo) {
frameHealthInfo[@"frame_source"] = [task textForSource];
frameHealthInfo[@"snap_type"] = [task textForSnapType];
frameHealthInfo[@"error_type"] = [task textForErrorType];
frameHealthInfo[@"capture_session_id"] = captureSessionID;
frameHealthInfo[@"metadata"] = task.metadata;
if (task.videoProperties.count > 0) {
[frameHealthInfo addEntriesFromDictionary:task.videoProperties];
}
[frameHealthInfoArray addObject:frameHealthInfo];
}
// Release the image as soon as possible to mitigate the memory pressure
task.unifiedImage = nil;
}];
for (NSDictionary *frameHealthInfo in frameHealthInfoArray) {
if ([frameHealthInfo[@"is_total_black"] boolValue] || [frameHealthInfo[@"is_possible_black"] boolValue]) {
                // TODO: Zi Kai Chen - add this back. Normally we use id<SCManiphestTicketCreator> for
// this but as this is a shared instance we cannot easily inject it. The work would
// involve making this not a shared instance.
// SCShakeBetaLogEvent(SCShakeBetaLoggerKeyCCamBlackSnap,
// JSONStringSerializeObjectForLogging(frameHealthInfo));
}
[[SCLogger sharedInstance] logUnsampledEventToEventLogger:kSCCameraMetricsFrameHealthCheckIndex
parameters:frameHealthInfo
secretParameters:nil
metrics:nil];
}
[_frameCheckTasks removeObjectForKey:captureSessionID];
}];
}
- (NSMutableDictionary *)_getFrameHealthInfoForImage:(UIImage *)image
source:(NSString *)source
snapType:(NSString *)snapType
metadata:(NSDictionary *)metadata
sourceImageSize:(CGSize)sourceImageSize
captureSessionID:(NSString *)captureSessionID
{
SCTraceODPCompatibleStart(2);
NSMutableDictionary *parameters = [NSMutableDictionary dictionary];
size_t samplesCount = 0;
CFTimeInterval start = CACurrentMediaTime();
CGImageRef imageRef = image.CGImage;
size_t imageWidth = CGImageGetWidth(imageRef);
size_t imageHeight = CGImageGetHeight(imageRef);
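    // Copy the raw bitmap bytes once; the sampling below assumes a packed 4-byte-per-pixel layout
    // (RGBA/BGRA), which is what the byte-order handling in _getSumRGBAFromData expects.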
CFDataRef pixelData = CGDataProviderCopyData(CGImageGetDataProvider(imageRef));
CFTimeInterval getImageDataTime = CACurrentMediaTime();
if (pixelData) {
const Byte *imageData = CFDataGetBytePtr(pixelData);
NSInteger stripLength = 0;
NSInteger bufferLength = 0;
NSInteger imagePixels = imageWidth * imageHeight;
        // Cap the number of sampled pixels; stripLength is a byte stride (4 bytes per RGBA pixel)
if (imagePixels > kSCManagedFrameHealthCheckerMaxSamples) {
stripLength = imagePixels / kSCManagedFrameHealthCheckerMaxSamples * 4;
bufferLength = kSCManagedFrameHealthCheckerMaxSamples;
} else {
stripLength = 4;
bufferLength = imagePixels;
}
samplesCount = bufferLength;
// Avoid dividing by zero
if (samplesCount != 0) {
FloatRGBA sumRGBA = [self _getSumRGBAFromData:imageData
stripLength:stripLength
bufferLength:bufferLength
bitmapInfo:CGImageGetBitmapInfo(imageRef)];
float averageR = sumRGBA.R / samplesCount;
float averageG = sumRGBA.G / samplesCount;
float averageB = sumRGBA.B / samplesCount;
float averageA = sumRGBA.A / samplesCount;
parameters[@"average_sampled_rgba_r"] = @(averageR);
parameters[@"average_sampled_rgba_g"] = @(averageG);
parameters[@"average_sampled_rgba_b"] = @(averageB);
parameters[@"average_sampled_rgba_a"] = @(averageA);
parameters[@"origin_frame_width"] = @(sourceImageSize.width);
parameters[@"origin_frame_height"] = @(sourceImageSize.height);
            // Also report "possible black" to identify intentional black snaps taken by covering the camera.
            // Normally averageA is very close to 255, but for a video overlay image it is very small,
            // so we require averageA > 250 to avoid treating a video overlay image as possibly black.
if (averageA > 250 && averageR < kSCManagedFrameHealthCheckerPossibleBlackThreshold &&
averageG < kSCManagedFrameHealthCheckerPossibleBlackThreshold &&
averageB < kSCManagedFrameHealthCheckerPossibleBlackThreshold) {
parameters[@"is_possible_black"] = @(YES);
                // Use these parameters for BigQuery conditions in Grafana
if (averageR == 0 && averageG == 0 && averageB == 0) {
parameters[@"is_total_black"] = @(YES);
}
}
} else {
SCLogCoreCameraError(@"[FrameHealthChecker] #%@:%@ - samplesCount is zero! captureSessionID:%@", snapType,
source, captureSessionID);
parameters[@"execution_error"] = @(YES);
}
CFRelease(pixelData);
} else {
SCLogCoreCameraError(@"[FrameHealthChecker] #%@:%@ - pixelData is nil! captureSessionID:%@", snapType, source,
captureSessionID);
parameters[@"execution_error"] = @(YES);
}
parameters[@"sample_size"] = @(samplesCount);
CFTimeInterval end = CACurrentMediaTime();
SCLogCoreCameraInfo(@"[FrameHealthChecker] #%@:%@ - GET_IMAGE_DATA_TIME:%f SAMPLE_DATA_TIME:%f TOTAL_TIME:%f",
snapType, source, getImageDataTime - start, end - getImageDataTime, end - start);
return parameters;
}
- (FloatRGBA)_getSumRGBAFromData:(const Byte *)imageData
stripLength:(NSInteger)stripLength
bufferLength:(NSInteger)bufferLength
bitmapInfo:(CGBitmapInfo)bitmapInfo
{
SCTraceODPCompatibleStart(2);
FloatRGBA sumRGBA;
    if ((bitmapInfo & kCGBitmapAlphaInfoMask) == kCGImageAlphaPremultipliedFirst &&
        (bitmapInfo & kCGBitmapByteOrderMask) == kCGBitmapByteOrder32Little) {
        // BGRA (premultiplied alpha first, 32-bit little-endian byte order)
sumRGBA.B = vDspColorElementSum(imageData, stripLength, bufferLength);
sumRGBA.G = vDspColorElementSum(imageData + 1, stripLength, bufferLength);
sumRGBA.R = vDspColorElementSum(imageData + 2, stripLength, bufferLength);
sumRGBA.A = vDspColorElementSum(imageData + 3, stripLength, bufferLength);
} else {
        // TODO: support pixel formats other than RGBA
sumRGBA.R = vDspColorElementSum(imageData, stripLength, bufferLength);
sumRGBA.G = vDspColorElementSum(imageData + 1, stripLength, bufferLength);
sumRGBA.B = vDspColorElementSum(imageData + 2, stripLength, bufferLength);
sumRGBA.A = vDspColorElementSum(imageData + 3, stripLength, bufferLength);
}
return sumRGBA;
}
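// vDspColorElementSum is defined elsewhere in this target and is assumed to sum every stripLength-th byte
// of the buffer across bufferLength samples (likely via vDSP from Accelerate). A minimal, unoptimized
// sketch of such a strided channel sum, for reference only; the name and signature below are illustrative:
__attribute__((unused)) static float SCFrameHealthCheckerStridedSumSketch(const Byte *data,
                                                                          NSInteger stride,
                                                                          NSInteger count)
{
    float sum = 0.0f;
    for (NSInteger i = 0; i < count; i++) {
        // Sample one byte of the chosen channel every `stride` bytes.
        sum += (float)data[i * stride];
    }
    return sum;
}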
@end

View File

@@ -0,0 +1,18 @@
//
// SCManagedFrontFlashController.h
// Snapchat
//
// Created by Liu Liu on 5/4/15.
// Copyright (c) 2015 Liu Liu. All rights reserved.
//
#import <Foundation/Foundation.h>
// This object must only be accessed on the SCManagedCapturer thread
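//
// Typical usage (illustrative; the property names are the real API, the instance name is not):
//   frontFlashController.flashActive = YES; // full-brightness white overlay for a still capture
//   frontFlashController.torchActive = YES; // dimmer, persistent overlay while recording video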
@interface SCManagedFrontFlashController : NSObject
@property (nonatomic, assign) BOOL flashActive;
@property (nonatomic, assign) BOOL torchActive;
@end

View File

@@ -0,0 +1,105 @@
//
// SCManagedFrontFlashController.m
// Snapchat
//
// Created by Liu Liu on 5/4/15.
// Copyright (c) 2015 Liu Liu. All rights reserved.
//
#import "SCManagedFrontFlashController.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/SCTrace.h>
@import UIKit;
@implementation SCManagedFrontFlashController {
BOOL _active;
UIView *_brightView;
CGFloat _brightnessWhenFlashAndTorchOff;
}
- (void)_setScreenWithFrontViewFlashActive:(BOOL)flashActive torchActive:(BOOL)torchActive
{
SCTraceStart();
SCAssertMainThread();
BOOL wasActive = _active;
_active = flashActive || torchActive;
if (!wasActive && _active) {
[self _activateFlash:flashActive];
} else if (wasActive && !_active) {
[self _deactivateFlash];
}
}
- (void)_activateFlash:(BOOL)flashActive
{
UIWindow *mainWindow = [[UIApplication sharedApplication] keyWindow];
if (!_brightView) {
CGRect frame = [mainWindow bounds];
CGFloat maxLength = MAX(CGRectGetWidth(frame), CGRectGetHeight(frame));
frame.size = CGSizeMake(maxLength, maxLength);
        // Use the longer screen edge for both dimensions so the overlay covers the screen in any orientation
_brightView = [[UIView alloc] initWithFrame:frame];
_brightView.userInteractionEnabled = NO;
_brightView.backgroundColor = [UIColor whiteColor];
}
_brightnessWhenFlashAndTorchOff = [UIScreen mainScreen].brightness;
SCLogGeneralInfo(@"[SCManagedFrontFlashController] Activating flash, setting screen brightness from %f to 1.0",
_brightnessWhenFlashAndTorchOff);
[self _brightenLoop];
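    // Full opacity for the one-shot flash; a dimmer overlay when acting as a continuous torch.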
_brightView.alpha = flashActive ? 1.0 : 0.75;
[mainWindow addSubview:_brightView];
}
- (void)_deactivateFlash
{
SCLogGeneralInfo(@"[SCManagedFrontFlashController] Deactivating flash, setting screen brightness from %f to %f",
[UIScreen mainScreen].brightness, _brightnessWhenFlashAndTorchOff);
[UIScreen mainScreen].brightness = _brightnessWhenFlashAndTorchOff;
if (_brightView) {
[_brightView removeFromSuperview];
}
}
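// The "front flash" is just a bright screen, and screen brightness can change out from under us while the
// flash or torch is active, so keep re-asserting full brightness every half second until it is deactivated.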
- (void)_brightenLoop
{
if (_active) {
SCLogGeneralInfo(@"[SCManagedFrontFlashController] In brighten loop, setting brightness from %f to 1.0",
[UIScreen mainScreen].brightness);
[UIScreen mainScreen].brightness = 1.0;
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, NSEC_PER_SEC / 2), dispatch_get_main_queue(), ^(void) {
[self _brightenLoop];
});
} else {
SCLogGeneralInfo(@"[SCManagedFrontFlashController] Recording is done, brighten loop ends");
}
}
- (void)setFlashActive:(BOOL)flashActive
{
SCTraceStart();
if (_flashActive != flashActive) {
_flashActive = flashActive;
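        // Snapshot the other flag on the calling thread so the main-thread block sees a consistent pair.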
BOOL torchActive = _torchActive;
runOnMainThreadAsynchronously(^{
[self _setScreenWithFrontViewFlashActive:flashActive torchActive:torchActive];
});
}
}
- (void)setTorchActive:(BOOL)torchActive
{
SCTraceStart();
if (_torchActive != torchActive) {
_torchActive = torchActive;
BOOL flashActive = _flashActive;
runOnMainThreadAsynchronously(^{
[self _setScreenWithFrontViewFlashActive:flashActive torchActive:torchActive];
});
}
}
@end

View File

@@ -0,0 +1,13 @@
//
// SCManagedLegacyStillImageCapturer.h
// Snapchat
//
// Created by Chao Pang on 10/4/16.
// Copyright © 2016 Snapchat, Inc. All rights reserved.
//
#import "SCManagedStillImageCapturer.h"
@interface SCManagedLegacyStillImageCapturer : SCManagedStillImageCapturer
@end

View File

@@ -0,0 +1,460 @@
//
// SCManagedLegacyStillImageCapturer.m
// Snapchat
//
// Created by Chao Pang on 10/4/16.
// Copyright © 2016 Snapchat, Inc. All rights reserved.
//
#import "SCManagedLegacyStillImageCapturer.h"
#import "AVCaptureConnection+InputDevice.h"
#import "SCCameraTweaks.h"
#import "SCLogger+Camera.h"
#import "SCManagedCapturer.h"
#import "SCManagedStillImageCapturer_Protected.h"
#import "SCStillImageCaptureVideoInputMethod.h"
#import <SCCrashLogger/SCCrashLogger.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCPerforming.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCLenses/SCLens.h>
#import <SCLogger/SCCameraMetrics.h>
#import <SCWebP/UIImage+WebP.h>
@import ImageIO;
static NSString *const kSCLegacyStillImageCaptureDefaultMethodErrorDomain =
@"kSCLegacyStillImageCaptureDefaultMethodErrorDomain";
static NSString *const kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain =
@"kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain";
static NSInteger const kSCLegacyStillImageCaptureDefaultMethodErrorEncounteredException = 10000;
static NSInteger const kSCLegacyStillImageCaptureLensStabilizationMethodErrorEncounteredException = 10001;
@implementation SCManagedLegacyStillImageCapturer {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
AVCaptureStillImageOutput *_stillImageOutput;
#pragma clang diagnostic pop
BOOL _shouldCapture;
NSUInteger _retries;
SCStillImageCaptureVideoInputMethod *_videoFileMethod;
}
- (instancetype)initWithSession:(AVCaptureSession *)session
performer:(id<SCPerforming>)performer
lensProcessingCore:(id<SCManagedCapturerLensAPI>)lensProcessingCore
delegate:(id<SCManagedStillImageCapturerDelegate>)delegate
{
SCTraceStart();
self = [super initWithSession:session performer:performer lensProcessingCore:lensProcessingCore delegate:delegate];
if (self) {
[self setupWithSession:session];
}
return self;
}
- (void)setupWithSession:(AVCaptureSession *)session
{
SCTraceStart();
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
#pragma clang diagnostic pop
_stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
[self setAsOutput:session];
}
- (void)setAsOutput:(AVCaptureSession *)session
{
SCTraceStart();
if ([session canAddOutput:_stillImageOutput]) {
[session addOutput:_stillImageOutput];
}
}
- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled
{
SCTraceStart();
if (_stillImageOutput.isHighResolutionStillImageOutputEnabled != highResolutionStillImageOutputEnabled) {
_stillImageOutput.highResolutionStillImageOutputEnabled = highResolutionStillImageOutputEnabled;
}
}
- (void)setPortraitModeCaptureEnabled:(BOOL)enabled
{
    // The legacy capturer is only used on devices running iOS versions below 10.2, which do not support
    // depth data, so this method is never called and does not need an implementation.
}
- (void)enableStillImageStabilization
{
SCTraceStart();
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
if (_stillImageOutput.isLensStabilizationDuringBracketedCaptureSupported) {
_stillImageOutput.lensStabilizationDuringBracketedCaptureEnabled = YES;
}
#pragma clang diagnostic pop
}
- (void)removeAsOutput:(AVCaptureSession *)session
{
SCTraceStart();
[session removeOutput:_stillImageOutput];
}
- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio
atZoomFactor:(float)zoomFactor
fieldOfView:(float)fieldOfView
state:(SCManagedCapturerState *)state
captureSessionID:(NSString *)captureSessionID
shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
completionHandler:
(sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler
{
SCTraceStart();
SCAssert(completionHandler, @"completionHandler shouldn't be nil");
_retries = 6; // AVFoundation Unknown Error usually resolves itself within 0.5 seconds
_aspectRatio = aspectRatio;
_zoomFactor = zoomFactor;
_fieldOfView = fieldOfView;
_state = state;
_captureSessionID = captureSessionID;
_shouldCaptureFromVideo = shouldCaptureFromVideo;
SCAssert(!_completionHandler, @"We shouldn't have a _completionHandler at this point otherwise we are destroying "
@"current completion handler.");
_completionHandler = [completionHandler copy];
[[SCLogger sharedInstance] logCameraExposureAdjustmentDelayStart];
if (!_adjustingExposureManualDetect) {
SCLogCoreCameraInfo(@"Capturing still image now");
[self _captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo];
_shouldCapture = NO;
} else {
SCLogCoreCameraInfo(@"Wait adjusting exposure (or after 0.4 seconds) and then capture still image");
_shouldCapture = YES;
[self _deadlineCaptureStillImage];
}
}
#pragma mark - SCManagedDeviceCapacityAnalyzerListener
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
didChangeAdjustingExposure:(BOOL)adjustingExposure
{
SCTraceStart();
@weakify(self);
[_performer performImmediatelyIfCurrentPerformer:^{
        // This callback arrives on a different thread, so dispatch back to the queue we operate on.
@strongify(self);
SC_GUARD_ELSE_RETURN(self);
self->_adjustingExposureManualDetect = adjustingExposure;
[self _didChangeAdjustingExposure:adjustingExposure
withStrategy:kSCCameraExposureAdjustmentStrategyManualDetect];
}];
}
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition
{
SCTraceStart();
@weakify(self);
[_performer performImmediatelyIfCurrentPerformer:^{
@strongify(self);
SC_GUARD_ELSE_RETURN(self);
self->_lightingConditionType = lightingCondition;
}];
}
#pragma mark - SCManagedCapturerListener
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state
{
SCTraceStart();
@weakify(self);
[_performer performImmediatelyIfCurrentPerformer:^{
@strongify(self);
SC_GUARD_ELSE_RETURN(self);
        // This callback arrives on a different thread, so dispatch back to the queue we operate on.
[self _didChangeAdjustingExposure:state.adjustingExposure withStrategy:kSCCameraExposureAdjustmentStrategyKVO];
}];
}
#pragma mark - Private methods
- (void)_didChangeAdjustingExposure:(BOOL)adjustingExposure withStrategy:(NSString *)strategy
{
if (!adjustingExposure && self->_shouldCapture) {
SCLogCoreCameraInfo(@"Capturing after adjusting exposure using strategy: %@", strategy);
[self _captureStillImageWithExposureAdjustmentStrategy:strategy];
self->_shouldCapture = NO;
}
}
- (void)_deadlineCaptureStillImage
{
SCTraceStart();
// Use the SCManagedCapturer's private queue.
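    // If exposure is still adjusting when the deadline fires, capture anyway so the user is not left waiting.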
[_performer perform:^{
if (_shouldCapture) {
[self _captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyDeadline];
_shouldCapture = NO;
}
}
after:SCCameraTweaksExposureDeadline()];
}
- (void)_captureStillImageWithExposureAdjustmentStrategy:(NSString *)strategy
{
SCTraceStart();
[[SCLogger sharedInstance] logCameraExposureAdjustmentDelayEndWithStrategy:strategy];
if (_shouldCaptureFromVideo) {
[self captureStillImageFromVideoBuffer];
return;
}
SCAssert(_stillImageOutput, @"stillImageOutput shouldn't be nil");
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
AVCaptureStillImageOutput *stillImageOutput = _stillImageOutput;
#pragma clang diagnostic pop
AVCaptureConnection *captureConnection = [self _captureConnectionFromStillImageOutput:stillImageOutput];
SCManagedCapturerState *state = [_state copy];
dispatch_block_t legacyStillImageCaptureBlock = ^{
SCCAssertMainThread();
        // If the application is not in the background and we have a still image connection, do the capture;
        // otherwise fail.
if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {
[_performer performImmediatelyIfCurrentPerformer:^{
sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler =
_completionHandler;
_completionHandler = nil;
completionHandler(nil, nil,
[NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain
code:kSCManagedStillImageCapturerApplicationStateBackground
userInfo:nil]);
}];
return;
}
#if !TARGET_IPHONE_SIMULATOR
if (!captureConnection) {
[_performer performImmediatelyIfCurrentPerformer:^{
sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler =
_completionHandler;
_completionHandler = nil;
completionHandler(nil, nil, [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain
code:kSCManagedStillImageCapturerNoStillImageConnection
userInfo:nil]);
}];
return;
}
#endif
// Select appropriate image capture method
if ([_delegate managedStillImageCapturerShouldProcessFileInput:self]) {
if (!_videoFileMethod) {
_videoFileMethod = [[SCStillImageCaptureVideoInputMethod alloc] init];
}
[[SCLogger sharedInstance] logStillImageCaptureApi:@"SCStillImageCapture"];
[[SCCoreCameraLogger sharedInstance]
logCameraCreationDelaySplitPointStillImageCaptureApi:@"SCStillImageCapture"];
[_videoFileMethod captureStillImageWithCapturerState:state
successBlock:^(NSData *imageData, NSDictionary *cameraInfo, NSError *error) {
[self _legacyStillImageCaptureDidSucceedWithImageData:imageData
sampleBuffer:nil
cameraInfo:cameraInfo
error:error];
}
failureBlock:^(NSError *error) {
[self _legacyStillImageCaptureDidFailWithError:error];
}];
} else {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
if (stillImageOutput.isLensStabilizationDuringBracketedCaptureSupported && !state.flashActive) {
[self _captureStabilizedStillImageWithStillImageOutput:stillImageOutput
captureConnection:captureConnection
capturerState:state];
} else {
[self _captureStillImageWithStillImageOutput:stillImageOutput
captureConnection:captureConnection
capturerState:state];
}
#pragma clang diagnostic pop
}
};
    // We need to call this on the main thread, and it must block.
[[SCQueuePerformer mainQueuePerformer] performAndWait:legacyStillImageCaptureBlock];
}
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
- (void)_captureStillImageWithStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput
captureConnection:(AVCaptureConnection *)captureConnection
capturerState:(SCManagedCapturerState *)state
{
[[SCLogger sharedInstance] logStillImageCaptureApi:@"AVStillImageCaptureAsynchronous"];
[[SCCoreCameraLogger sharedInstance]
logCameraCreationDelaySplitPointStillImageCaptureApi:@"AVStillImageCaptureAsynchronous"];
@try {
[stillImageOutput
captureStillImageAsynchronouslyFromConnection:captureConnection
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer) {
NSData *imageData = [AVCaptureStillImageOutput
jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
[self
_legacyStillImageCaptureDidSucceedWithImageData:imageData
sampleBuffer:
imageDataSampleBuffer
cameraInfo:
cameraInfoForBuffer(
imageDataSampleBuffer)
error:error];
} else {
                                         if ([error.domain isEqualToString:AVFoundationErrorDomain] &&
                                             error.code == -11800) {
                                             // AVErrorUnknown (-11800), the iOS 7-era "unknown error"; retrying usually resolves it
[self _legacyStillImageCaptureWillRetryWithError:error];
} else {
[self _legacyStillImageCaptureDidFailWithError:error];
}
}
}];
} @catch (NSException *e) {
[SCCrashLogger logHandledException:e];
[self _legacyStillImageCaptureDidFailWithError:
[NSError errorWithDomain:kSCLegacyStillImageCaptureDefaultMethodErrorDomain
code:kSCLegacyStillImageCaptureDefaultMethodErrorEncounteredException
userInfo:@{
@"exception" : e
}]];
}
}
- (void)_captureStabilizedStillImageWithStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput
captureConnection:(AVCaptureConnection *)captureConnection
capturerState:(SCManagedCapturerState *)state
{
[[SCLogger sharedInstance] logStillImageCaptureApi:@"AVStillImageOutputCaptureBracketAsynchronously"];
[[SCCoreCameraLogger sharedInstance]
logCameraCreationDelaySplitPointStillImageCaptureApi:@"AVStillImageOutputCaptureBracketAsynchronously"];
NSArray *bracketArray = [self _bracketSettingsArray:captureConnection];
@try {
[stillImageOutput
captureStillImageBracketAsynchronouslyFromConnection:captureConnection
withSettingsArray:bracketArray
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer,
AVCaptureBracketedStillImageSettings *settings,
NSError *err) {
if (!imageDataSampleBuffer) {
[self _legacyStillImageCaptureDidFailWithError:err];
return;
}
NSData *jpegData = [AVCaptureStillImageOutput
jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
[self
_legacyStillImageCaptureDidSucceedWithImageData:jpegData
sampleBuffer:
imageDataSampleBuffer
cameraInfo:
cameraInfoForBuffer(
imageDataSampleBuffer)
error:nil];
}];
} @catch (NSException *e) {
[SCCrashLogger logHandledException:e];
[self _legacyStillImageCaptureDidFailWithError:
[NSError errorWithDomain:kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain
code:kSCLegacyStillImageCaptureLensStabilizationMethodErrorEncounteredException
userInfo:@{
@"exception" : e
}]];
}
}
#pragma clang diagnostic pop
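// Builds a single-element bracket that locks in the device's current exposure duration at the current ISO;
// the bracketed API is used because lens stabilization is only exposed through bracketed capture.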
- (NSArray *)_bracketSettingsArray:(AVCaptureConnection *)stillImageConnection
{
NSInteger const stillCount = 1;
NSMutableArray *bracketSettingsArray = [NSMutableArray arrayWithCapacity:stillCount];
AVCaptureDevice *device = [stillImageConnection inputDevice];
AVCaptureManualExposureBracketedStillImageSettings *settings = [AVCaptureManualExposureBracketedStillImageSettings
manualExposureSettingsWithExposureDuration:device.exposureDuration
ISO:AVCaptureISOCurrent];
for (NSInteger i = 0; i < stillCount; i++) {
[bracketSettingsArray addObject:settings];
}
return [bracketSettingsArray copy];
}
- (void)_legacyStillImageCaptureDidSucceedWithImageData:(NSData *)imageData
sampleBuffer:(CMSampleBufferRef)sampleBuffer
cameraInfo:(NSDictionary *)cameraInfo
error:(NSError *)error
{
[[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];
[[SCCoreCameraLogger sharedInstance]
logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];
if (sampleBuffer) {
CFRetain(sampleBuffer);
}
[_performer performImmediatelyIfCurrentPerformer:^{
UIImage *fullScreenImage = [self imageFromData:imageData
currentZoomFactor:_zoomFactor
targetAspectRatio:_aspectRatio
fieldOfView:_fieldOfView
state:_state
sampleBuffer:sampleBuffer];
sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
_completionHandler = nil;
completionHandler(fullScreenImage, cameraInfo, error);
if (sampleBuffer) {
CFRelease(sampleBuffer);
}
}];
}
- (void)_legacyStillImageCaptureDidFailWithError:(NSError *)error
{
[_performer performImmediatelyIfCurrentPerformer:^{
sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
_completionHandler = nil;
completionHandler(nil, nil, error);
}];
}
- (void)_legacyStillImageCaptureWillRetryWithError:(NSError *)error
{
if (_retries-- > 0) {
[_performer perform:^{
[self _captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo];
}
after:kSCCameraRetryInterval];
} else {
[self _legacyStillImageCaptureDidFailWithError:error];
}
}
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
- (AVCaptureConnection *)_captureConnectionFromStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput
#pragma clang diagnostic pop
{
SCTraceStart();
SCAssert([_performer isCurrentPerformer], @"");
NSArray *connections = [stillImageOutput.connections copy];
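    // Pick the connection whose input port carries video; that is the connection stills are captured from.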
for (AVCaptureConnection *connection in connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
return connection;
}
}
}
return nil;
}
@end

View File

@@ -0,0 +1,13 @@
//
// SCManagedPhotoCapturer.h
// Snapchat
//
// Created by Chao Pang on 10/5/16.
// Copyright © 2016 Snapchat, Inc. All rights reserved.
//
#import "SCManagedStillImageCapturer.h"
@interface SCManagedPhotoCapturer : SCManagedStillImageCapturer
@end