FaceDetector implementation integrated

This commit is contained in:
Joao Fidelis 2018-01-21 22:20:57 -02:00
parent 160d07827b
commit 8ebeb530c5
16 changed files with 1140 additions and 67 deletions

View File

@ -0,0 +1,4 @@
target 'RNCameraExample' do
pod 'GoogleMobileVision/FaceDetector'
pod 'GoogleMobileVision/MVDataOutput'
end

View File

@ -24,6 +24,21 @@
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>NSAppTransportSecurity</key>
<dict>
<key>NSExceptionDomains</key>
<dict>
<key>localhost</key>
<dict>
<key>NSExceptionAllowsInsecureHTTPLoads</key>
<true/>
</dict>
</dict>
</dict>
<key>NSCameraUsageDescription</key>
<string>Used to take photos</string>
<key>NSLocationWhenInUseUsageDescription</key>
<string></string>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIRequiredDeviceCapabilities</key>
@ -38,19 +53,5 @@
</array>
<key>UIViewControllerBasedStatusBarAppearance</key>
<false/>
<key>NSLocationWhenInUseUsageDescription</key>
<string></string>
<key>NSAppTransportSecurity</key>
<!--See http://ste.vn/2015/06/10/configuring-app-transport-security-ios-9-osx-10-11/ -->
<dict>
<key>NSExceptionDomains</key>
<dict>
<key>localhost</key>
<dict>
<key>NSExceptionAllowsInsecureHTTPLoads</key>
<true/>
</dict>
</dict>
</dict>
</dict>
</plist>

View File

@ -0,0 +1,33 @@
//
// EXFaceDetectorManager.h
// RCTCamera
//
// Created by Stanisław Chmiela on 22.11.2017.
// Copyright © 2017 650 Industries. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <GoogleMobileVision/GoogleMobileVision.h>
#import <GoogleMVDataOutput/GoogleMVDataOutput.h>
@protocol RNFaceDetectorDelegate
- (void)onFacesDetected:(NSArray<NSDictionary *> *)faces;
@end
@interface RNFaceDetectorManager : NSObject
- (NSDictionary *)constantsToExport;
+ (NSDictionary *)constants;
- (instancetype)initWithSessionQueue:(dispatch_queue_t)sessionQueue delegate:(id <RNFaceDetectorDelegate>)delegate;
- (void)setIsEnabled:(id)json;
- (void)setLandmarksDetected:(id)json;
- (void)setClassificationsDetected:(id)json;
- (void)setMode:(id)json;
- (void)maybeStartFaceDetectionOnSession:(AVCaptureSession *)session withPreviewLayer:(AVCaptureVideoPreviewLayer *)previewLayer;
- (void)stopFaceDetection;
@end
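// A minimal usage sketch (illustrative, not part of this commit): an owning view that already has
// a session queue, an AVCaptureSession and a preview layer could wire the manager up roughly like
// this (RNCamera.m further down in this commit is the real integration):
//
//   @interface MyCameraView : UIView <RNFaceDetectorDelegate>   // hypothetical owner
//   @end
//
//   RNFaceDetectorManager *manager =
//       [[RNFaceDetectorManager alloc] initWithSessionQueue:self.sessionQueue delegate:self];
//   [manager setIsEnabled:@(YES)];
//   [manager maybeStartFaceDetectionOnSession:self.session withPreviewLayer:self.previewLayer];
//
//   - (void)onFacesDetected:(NSArray<NSDictionary *> *)faces
//   {
//       // forward the encoded face dictionaries to JS, draw indicators, etc.
//   }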

View File

@ -0,0 +1,260 @@
//
// RNFaceDetectorManager.m
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import <React/RCTConvert.h>
#import "RNCamera.h"
#import "RNFaceEncoder.h"
#import "RNFaceDetectorUtils.h"
#import "RNFaceDetectorModule.h"
#import "RNFaceDetectorManager.h"
@interface RNFaceDetectorManager() <GMVDataOutputDelegate>
@property (assign, nonatomic) long previousFacesCount;
@property (nonatomic, strong) GMVDataOutput *dataOutput;
@property (nonatomic, weak) AVCaptureSession *session;
@property (nonatomic, weak) dispatch_queue_t sessionQueue;
@property (nonatomic, assign, getter=isConnected) BOOL connected;
@property (nonatomic, weak) id <RNFaceDetectorDelegate> delegate;
@property (nonatomic, weak) AVCaptureVideoPreviewLayer *previewLayer;
@property (nonatomic, assign, getter=isDetectingFaces) BOOL faceDetecting;
@property (nonatomic, strong) NSMutableDictionary<NSString *, id> *faceDetectorOptions;
@end
@implementation RNFaceDetectorManager
static NSDictionary *defaultFaceDetectorOptions = nil;
- (NSDictionary *)constantsToExport
{
return [[self class] constants];
}
+ (NSDictionary *)constants
{
return [RNFaceDetectorUtils constantsToExport];
}
- (instancetype)initWithSessionQueue:(dispatch_queue_t)sessionQueue delegate:(id <RNFaceDetectorDelegate>)delegate
{
if (self = [super init]) {
_delegate = delegate;
_previousFacesCount = -1;
_sessionQueue = sessionQueue;
_faceDetectorOptions = [[NSMutableDictionary alloc] initWithDictionary:[[self class] _getDefaultFaceDetectorOptions]];
}
return self;
}
# pragma mark Properties setters
- (void)setSession:(AVCaptureSession *)session
{
_session = session;
}
# pragma mark - JS properties setters
- (void)setIsEnabled:(id)json
{
BOOL newFaceDetecting = [RCTConvert BOOL:json];
if ([self isDetectingFaces] != newFaceDetecting) {
_faceDetecting = newFaceDetecting;
[self _runBlockIfQueueIsPresent:^{
if ([self isDetectingFaces]) {
[self tryEnablingFaceDetection];
} else {
[self stopFaceDetection];
}
}];
}
}
- (void)setLandmarksDetected:(id)json
{
[self _updateOptionSettingForKey:GMVDetectorFaceLandmarkType withJSONValue:json];
}
- (void)setClassificationsDetected:(id)json
{
[self _updateOptionSettingForKey:GMVDetectorFaceClassificationType withJSONValue:json];
}
- (void)setMode:(id)json
{
[self _updateOptionSettingForKey:GMVDetectorFaceMode withJSONValue:json];
}
# pragma mark - Public API
- (void)maybeStartFaceDetectionOnSession:(AVCaptureSession *)session withPreviewLayer:(AVCaptureVideoPreviewLayer *)previewLayer
{
_session = session;
_previewLayer = previewLayer;
[self tryEnablingFaceDetection];
}
- (void)tryEnablingFaceDetection
{
if (!_session) {
return;
}
[_session beginConfiguration];
if ([self isDetectingFaces]) {
@try {
GMVDetector *faceDetector = [GMVDetector detectorOfType:GMVDetectorTypeFace options:_faceDetectorOptions];
GMVDataOutput *dataOutput = [[GMVMultiDataOutput alloc] initWithDetector:faceDetector];
[dataOutput setDataDelegate:self];
if ([_session canAddOutput:dataOutput]) {
[_session addOutput:dataOutput];
_dataOutput = dataOutput;
_connected = true;
}
_previousFacesCount = -1;
[self _notifyOfFaces:nil];
} @catch (NSException *exception) {
RCTLogWarn(@"%@", [exception description]);
}
}
[_session commitConfiguration];
}
- (void)stopFaceDetection
{
if (!_session) {
return;
}
[_session beginConfiguration];
if ([_session.outputs containsObject:_dataOutput]) {
[_session removeOutput:_dataOutput];
[_dataOutput cleanup];
_dataOutput = nil;
_connected = false;
}
[_session commitConfiguration];
if ([self isDetectingFaces]) {
_previousFacesCount = -1;
[self _notifyOfFaces:nil];
}
}
# pragma mark Private API
- (void)_resetFaceDetector
{
[self stopFaceDetection];
[self tryEnablingFaceDetection];
}
- (void)_notifyOfFaces:(NSArray<NSDictionary *> *)faces
{
NSArray<NSDictionary *> *reportableFaces = faces == nil ? @[] : faces;
// Send an event when there are faces that have been detected ([faces count] > 0),
// or if the listener may think that there are still faces in the video (_prevCount > 0),
// or if we really want the event to be sent, e.g. to reset listener info (_prevCount == -1).
// (Worked cases are shown after this method.)
if ([reportableFaces count] > 0 || _previousFacesCount != 0) {
if (_delegate) {
[_delegate onFacesDetected:reportableFaces];
}
// Maybe if the delegate is not present anymore we should disable encoding,
// however this should never happen.
_previousFacesCount = [reportableFaces count];
}
}
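// Worked cases for the condition in -_notifyOfFaces: above (assuming a delegate is set):
//   faces == nil, _previousFacesCount == -1 -> @[] is sent (resets listener state)
//   2 faces,      _previousFacesCount ==  0 -> the faces are sent
//   faces == nil, _previousFacesCount ==  3 -> @[] is sent (tells the listener the faces are gone)
//   faces == nil, _previousFacesCount ==  0 -> nothing is sent (nothing changed)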
# pragma mark - Utilities
- (long)_getLongOptionValueForKey:(NSString *)key
{
return [(NSNumber *)[_faceDetectorOptions valueForKey:key] longValue];
}
- (void)_updateOptionSettingForKey:(NSString *)key withJSONValue:(id)json
{
long requestedValue = [RCTConvert NSInteger:json];
long currentValue = [self _getLongOptionValueForKey:key];
if (requestedValue != currentValue) {
[_faceDetectorOptions setValue:@(requestedValue) forKey:key];
[self _runBlockIfQueueIsPresent:^{
[self _resetFaceDetector];
}];
}
}
- (void)_runBlockIfQueueIsPresent:(void (^)(void))block
{
if (_sessionQueue) {
dispatch_async(_sessionQueue, block);
}
}
#pragma mark - GMVDataOutputDelegate
- (void)dataOutput:(GMVDataOutput *)dataOutput didFinishedDetection:(NSArray<__kindof GMVFeature *> *)results
{
// Calling dataOutput:didFinishedDetection: with a dataOutput whose videoSettings carries no information about
// width or height started to happen after the refactor that moved face detection logic from EXCameraManager to EXFaceDetectorManager.
// Presumably no information is provided because the data output is already disconnected from the input, so it has no
// information about the source. Let's reset the information then.
if (!_connected) {
[self _notifyOfFaces:nil];
return;
}
AVCaptureVideoOrientation interfaceVideoOrientation = _previewLayer.connection.videoOrientation;
CGAffineTransform transform = [RNFaceDetectorUtils transformFromDeviceOutput:dataOutput toInterfaceVideoOrientation:interfaceVideoOrientation];
RNFaceEncoder *faceEncoder = [[RNFaceEncoder alloc] initWithTransform:transform];
NSMutableArray<NSDictionary *> *encodedFaces = [NSMutableArray arrayWithCapacity:[results count]];
[results enumerateObjectsUsingBlock:^(GMVFeature * _Nonnull feature, NSUInteger _idx, BOOL * _Nonnull _stop) {
if([feature isKindOfClass:[GMVFaceFeature class]]) {
GMVFaceFeature *face = (GMVFaceFeature *)feature;
[encodedFaces addObject:[faceEncoder encode:face]];
}
}];
[self _notifyOfFaces:encodedFaces];
}
# pragma mark - Default options
+ (NSDictionary *)_getDefaultFaceDetectorOptions
{
if (defaultFaceDetectorOptions == nil) {
[self _initDefaultFaceDetectorOptions];
}
return defaultFaceDetectorOptions;
}
+ (void)_initDefaultFaceDetectorOptions
{
defaultFaceDetectorOptions = @{
GMVDetectorFaceTrackingEnabled : @(YES),
GMVDetectorFaceMode : @(GMVDetectorFaceFastMode),
GMVDetectorFaceLandmarkType : @(GMVDetectorFaceLandmarkNone),
GMVDetectorFaceClassificationType : @(GMVDetectorFaceClassificationNone),
GMVDetectorFaceMinSize : @(0.15)
};
}
@end

View File

@ -0,0 +1,12 @@
//
// RNFaceDetectorModule.h
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import <React/RCTBridgeModule.h>
#import <GoogleMobileVision/GoogleMobileVision.h>
@interface RNFaceDetectorModule : NSObject <RCTBridgeModule>
@end

View File

@ -0,0 +1,195 @@
//
// RNFaceDetectorModule.m
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import "RNFaceDetectorModule.h"
#import "RNFaceEncoder.h"
#import "RNFileSystem.h"
#import "RNFaceDetectorUtils.h"
static const NSString *kModeOptionName = @"mode";
static const NSString *kDetectLandmarksOptionName = @"detectLandmarks";
static const NSString *kRunClassificationsOptionName = @"runClassifications";
@implementation RNFaceDetectorModule
static NSFileManager *fileManager = nil;
static NSDictionary *defaultDetectorOptions = nil;
- (instancetype)init
{
self = [super init];
if (self) {
fileManager = [NSFileManager defaultManager];
}
return self;
}
RCT_EXPORT_MODULE(ReactNativeFaceDetector);
@synthesize bridge = _bridge;
- (void)setBridge:(RCTBridge *)bridge
{
_bridge = bridge;
}
+ (BOOL)requiresMainQueueSetup
{
return NO;
}
- (NSDictionary *)constantsToExport
{
return [RNFaceDetectorUtils constantsToExport];
}
RCT_EXPORT_METHOD(detectFaces:(nonnull NSDictionary *)options
resolver:(RCTPromiseResolveBlock)resolve
rejecter:(RCTPromiseRejectBlock)reject)
{
NSString *uri = options[@"uri"];
if (uri == nil) {
reject(@"E_FACE_DETECTION_FAILED", @"You must define a URI.", nil);
return;
}
NSURL *url = [NSURL URLWithString:uri];
NSString *path = [url.path stringByStandardizingPath];
// if (!([self.bridge.scopedModules.fileSystem permissionsForURI:url] & EXFileSystemPermissionRead)) {
// reject(@"E_FILESYSTEM_PERMISSIONS", [NSString stringWithFormat:@"File '%@' isn't readable.", uri], nil);
// return;
// }
@try {
GMVDetector *detector = [[self class] detectorForOptions:options];
if (![fileManager fileExistsAtPath:path]) {
reject(@"E_FACE_DETECTION_FAILED", [NSString stringWithFormat:@"The file does not exist. Given path: `%@`.", path], nil);
return;
}
UIImage *image = [[UIImage alloc] initWithContentsOfFile:path];
NSDictionary *detectionOptions = [[self class] detectionOptionsForImage:image];
NSArray<GMVFaceFeature *> *faces = [detector featuresInImage:image options:detectionOptions];
RNFaceEncoder *faceEncoder = [[RNFaceEncoder alloc] init];
NSMutableArray<NSDictionary *> *encodedFaces = [NSMutableArray arrayWithCapacity:[faces count]];
[faces enumerateObjectsUsingBlock:^(GMVFaceFeature * _Nonnull face, NSUInteger _idx, BOOL * _Nonnull _stop) {
[encodedFaces addObject:[faceEncoder encode:face]];
}];
resolve(@{
@"faces" : encodedFaces,
@"image" : @{
@"uri" : options[@"uri"],
@"width" : @(image.size.width),
@"height" : @(image.size.height),
@"orientation" : @([RNFaceDetectorModule exifOrientationFor:image.imageOrientation])
}
});
} @catch (NSException *exception) {
reject(@"E_FACE_DETECTION_FAILED", [exception description], nil);
}
}
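// Illustrative shapes (example values, not from the original source) of what detectFaces
// accepts and resolves with:
//
//   options:  @{ @"uri"                : @"file:///path/to/photo.jpg",
//                @"mode"               : @(GMVDetectorFaceAccurateMode),
//                @"detectLandmarks"    : @(GMVDetectorFaceLandmarkAll),
//                @"runClassifications" : @(GMVDetectorFaceClassificationAll) }
//
//   resolved: @{ @"faces" : @[ /* dictionaries built by RNFaceEncoder */ ],
//                @"image" : @{ @"uri" : @"file:///path/to/photo.jpg",
//                              @"width" : @3024, @"height" : @4032,
//                              @"orientation" : @6 /* EXIF value */ } }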
+ (GMVDetector *)detectorForOptions:(NSDictionary *)options
{
NSMutableDictionary *parsedOptions = [[NSMutableDictionary alloc] initWithDictionary:[self getDefaultDetectorOptions]];
if (options[kDetectLandmarksOptionName]) {
[parsedOptions setObject:options[kDetectLandmarksOptionName] forKey:GMVDetectorFaceLandmarkType];
}
if (options[kModeOptionName]) {
[parsedOptions setObject:options[kModeOptionName] forKey:GMVDetectorFaceMode];
}
if (options[kRunClassificationsOptionName]) {
[parsedOptions setObject:options[kRunClassificationsOptionName] forKey:GMVDetectorFaceClassificationType];
}
return [GMVDetector detectorOfType:GMVDetectorTypeFace options:parsedOptions];
}
# pragma mark - Detector default options getter and initializer
+ (NSDictionary *)getDefaultDetectorOptions
{
if (defaultDetectorOptions == nil) {
[self initDefaultDetectorOptions];
}
return defaultDetectorOptions;
}
+ (void)initDefaultDetectorOptions
{
defaultDetectorOptions = @{
GMVDetectorFaceMode : @(GMVDetectorFaceAccurateMode),
GMVDetectorFaceLandmarkType : @(GMVDetectorFaceLandmarkAll),
GMVDetectorFaceClassificationType : @(GMVDetectorFaceClassificationAll)
};
}
# pragma mark - Utility methods
+ (NSDictionary *)detectionOptionsForImage:(UIImage *)image
{
return @{
GMVDetectorImageOrientation : @([[self class] gmvImageOrientationFor:image.imageOrientation]),
};
}
// As the documentation (http://cocoadocs.org/docsets/GoogleMobileVision/1.0.2/Constants/GMVImageOrientation.html) suggests,
// the value of GMVImageOrientation is the same as the value defined by the EXIF specification, so we can adapt
// https://gist.github.com/steipete/4666527 to our needs.
+ (GMVImageOrientation)gmvImageOrientationFor:(UIImageOrientation)orientation
{
switch (orientation) {
case UIImageOrientationUp:
return GMVImageOrientationTopLeft;
case UIImageOrientationDown:
return GMVImageOrientationBottomRight;
case UIImageOrientationLeft:
return GMVImageOrientationLeftBottom;
case UIImageOrientationRight:
return GMVImageOrientationRightTop;
case UIImageOrientationUpMirrored:
return GMVImageOrientationTopRight;
case UIImageOrientationDownMirrored:
return GMVImageOrientationBottomLeft;
case UIImageOrientationLeftMirrored:
return GMVImageOrientationLeftTop;
case UIImageOrientationRightMirrored:
return GMVImageOrientationRightBottom;
}
}
// https://gist.github.com/steipete/4666527
+ (int)exifOrientationFor:(UIImageOrientation)orientation
{
switch (orientation) {
case UIImageOrientationUp:
return 1;
case UIImageOrientationDown:
return 3;
case UIImageOrientationLeft:
return 8;
case UIImageOrientationRight:
return 6;
case UIImageOrientationUpMirrored:
return 2;
case UIImageOrientationDownMirrored:
return 4;
case UIImageOrientationLeftMirrored:
return 5;
case UIImageOrientationRightMirrored:
return 7;
}
}
@end

View File

@ -0,0 +1,17 @@
//
// RNFaceDetectorPointTransformCalculator.h
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
@interface RNFaceDetectorPointTransformCalculator : NSObject
- (instancetype)initToTransformFromOrientation:(AVCaptureVideoOrientation)orientation toOrientation:(AVCaptureVideoOrientation)toOrientation forVideoWidth:(CGFloat)videoWidth andVideoHeight:(CGFloat)videoHeight;
- (CGAffineTransform)transform;
@end

View File

@ -0,0 +1,277 @@
//
// RNFaceDetectorPointTransformCalculator.m
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import "RNFaceDetectorPointTransformCalculator.h"
#define cDefaultFloatComparisonEpsilon 0.0001
#define cModEqualFloatsWithEpsilon(dividend, divisor, modulo, epsilon) \
fabs( fmod(dividend, divisor) - modulo ) < epsilon
#define cModEqualFloats(dividend, divisor, modulo) \
cModEqualFloatsWithEpsilon(dividend, divisor, modulo, cDefaultFloatComparisonEpsilon)
/*
* The purpose of this class is to calculate the transform used to translate
* a face detected by Google Mobile Vision into proper view coordinates.
*
* When an Expo app locks interface orientation in `app.json` or with `ScreenOrientation.allow`,
* the interface gets locked, but the device orientation can still change. It looks like Google Mobile Vision
* listens to device orientation changes and transforms coordinates of faces as if the device orientation
* always equaled the interface orientation (which in Expo is not the case).
*
* Let's see the behavior on a specific example. Imagine an app with screen orientation locked to portrait.
*
* ```
* +---+
* |^^ |   // by ^^ we shall denote a happy face, ^^
* |   |
* |   |
* +---+
*   -     // by - we shall denote the bottom of the interface.
* ```
*
* When the device is being held like this, the face is properly reported at (0, 0).
* However, when we rotate the device to landscape, the situation looks like this:
*
* ```
* +---------------+
* |^^            x|   // by xx we shall denote where the face should be according to the GMV detector.
* ||             x|   // note that the interface is still portrait-oriented
* |               |
* +---------------+
* ```
*
* For GMV, which thinks that the interface is in landscape (`UIDeviceOrientation` changed to landscape)
* the face is in `(0, 0)`. However, for our app `(0, 0)` is in the top left corner of the device --
* -- that's where the face indicator gets positioned.
*
* That's when we have to rotate and translate the face indicator. Here we have to rotate it by -90 degrees.
*
* ```
* +---------------+
* |^^             |xx   // something is still wrong
* ||              |
* |               |
* +---------------+
* ```
*
* Not only must we rotate the indicator, we also have to translate it. Here by (-videoWidth, 0).
*
* ```
* +---------------+
* |**             |   // detected eyes glow inside the face indicator
* ||              |
* |               |
* +---------------+
* ```
*
* Fixing this issue is the purpose of this whole class.
*
*/
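/*
 * A worked example (added for illustration) matching the lookup tables and methods below:
 * device orientation = landscape left, interface orientation = portrait,
 * videoWidth = 640, videoHeight = 480.
 *
 *   -rotation    : rotationDictionary[portrait][landscape left] == M_PI_2, negated because
 *                  fromOrientation > toOrientation, giving -M_PI_2 (the -90 degrees above).
 *   -translation : RNTranslateYNegativeWidth == (0, -640); since the rotation is +/-90 degrees
 *                  it is additionally rotated by -90 degrees, giving (-640, 0), i.e. (-videoWidth, 0).
 *
 * -transform combines the two, which is exactly the "rotate by -90 degrees and translate by
 * (-videoWidth, 0)" correction described above.
 */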
typedef NS_ENUM(NSInteger, RNTranslationEnum) {
RNTranslateYNegativeWidth,
RNTranslateXNegativeHeight,
RNTranslateXYNegative,
RNTranslateYXNegative
};
@interface RNFaceDetectorPointTransformCalculator()
@property (assign, nonatomic) AVCaptureVideoOrientation fromOrientation;
@property (assign, nonatomic) AVCaptureVideoOrientation toOrientation;
@property (assign, nonatomic) CGFloat videoWidth;
@property (assign, nonatomic) CGFloat videoHeight;
@end
@implementation RNFaceDetectorPointTransformCalculator
- (instancetype)initToTransformFromOrientation:(AVCaptureVideoOrientation)fromOrientation toOrientation:(AVCaptureVideoOrientation)toOrientation forVideoWidth:(CGFloat)videoWidth andVideoHeight:(CGFloat)videoHeight
{
self = [super init];
if (self) {
_fromOrientation = fromOrientation;
_toOrientation = toOrientation;
_videoWidth = videoWidth;
_videoHeight = videoHeight;
}
return self;
}
- (CGFloat)rotation
{
if (_fromOrientation == _toOrientation) {
return 0;
}
AVCaptureVideoOrientation firstOrientation = MIN(_fromOrientation, _toOrientation);
AVCaptureVideoOrientation secondOrientation = MAX(_fromOrientation, _toOrientation);
CGFloat angle = [[[self class] getRotationDictionary][@(firstOrientation)][@(secondOrientation)] doubleValue];
/*
* It turns out that if you need to rotate the indicator by -90 degrees to get it from
* landscape left (Device orientation) to portrait (Interface Orientation),
* to get the indicator from portrait (D) to landscape left (I), you need to rotate it by 90 degrees.
* Same analogy `r(1, 2) == x <==> r(2, 1) == -x` is true for every other transformation.
*/
if (_fromOrientation > _toOrientation) {
angle = -angle;
}
return angle;
}
- (CGPoint)translation
{
if (_fromOrientation == _toOrientation) {
return CGPointZero;
}
AVCaptureVideoOrientation firstOrientation = MIN(_fromOrientation, _toOrientation);
AVCaptureVideoOrientation secondOrientation = MAX(_fromOrientation, _toOrientation);
RNTranslationEnum enumValue = [[[self class] getTranslationDictionary][@(firstOrientation)][@(secondOrientation)] intValue];
CGPoint translation = [self translationForEnum:enumValue];
/*
* Here the analogy is a little bit more complicated than when calculating rotation.
* It turns out that if you need to translate the _rotated_ indicator
* from landscape left (D) to portrait (I) by `(-videoWidth, 0)` (see top class comment),
* to translate the rotated indicator from portrait (D) to landscape left (I), you need to translate it
* by `(0, -videoWidth)`.
*
* ```
*                              +-------+
*  +--------------------+      |^^     |    // ^^ == happy face
*  |^^                  |      |       |
*  |                    |      |       |
*  |                    |      |       ||   // | or - == bottom of the interface
*  |                    |      |       |
*  |                    |      |x      |    // xx == initial face indicator
*  +--------------------+      |x      |
*            -                 +-------+
*                               oo           // oo == rotated face indicator
* ```
*
* As we can see, the indicator has to be translated by `(0, -videoWidth)` to match with the happy face.
*
* It turns out, that `(0, -videoWidth) == translation(device: 1, interface: 4)` can be calculated by
* rotating `translation(device: 4, interface: 1) == (-videoWidth, 0)` by `rotation(4, 1) == -90deg`.
*
* One might think that the same analogy `t(1, 2) == r(2, 1)[t(2, 1)]` always works,
* but here this assumption would be wrong. The analogy holds only when the device and interface rotations
* differ by 90 or -90 degrees.
*
* Otherwise (when transforming from/to portrait/upside down or landscape left/right)
* `translation(1, 2) == translation(2, 1)`.
*/
if (_fromOrientation > _toOrientation) {
CGFloat translationRotationAngle = [self rotation];
if (cModEqualFloats(translationRotationAngle + M_PI, M_PI, M_PI_2)) {
CGAffineTransform transform = CGAffineTransformIdentity;
transform = CGAffineTransformRotate(transform, translationRotationAngle);
translation = CGPointApplyAffineTransform(translation, transform);
}
}
return translation;
}
- (CGAffineTransform)transform
{
CGAffineTransform transform = CGAffineTransformIdentity;
CGFloat rotation = [self rotation];
transform = CGAffineTransformRotate(transform, rotation);
CGPoint translation = [self translation];
transform = CGAffineTransformTranslate(transform, translation.x, translation.y);
return transform;
}
# pragma mark - Enum conversion
- (CGPoint)translationForEnum:(RNTranslationEnum)enumValue
{
switch (enumValue) {
case RNTranslateXNegativeHeight:
return CGPointMake(-_videoHeight, 0);
case RNTranslateYNegativeWidth:
return CGPointMake(0, -_videoWidth);
case RNTranslateXYNegative:
return CGPointMake(-_videoWidth, -_videoHeight);
case RNTranslateYXNegative:
return CGPointMake(-_videoHeight, -_videoWidth);
}
}
# pragma mark - Lookup tables
static NSDictionary<NSNumber *, NSDictionary<NSNumber *, NSNumber *> *> *rotationDictionary = nil;
static NSDictionary<NSNumber *, NSDictionary<NSNumber *, NSNumber *> *> *translationDictionary = nil;
+ (NSDictionary<NSNumber *, NSDictionary<NSNumber *, NSNumber *> *> *) getRotationDictionary
{
if (rotationDictionary == nil) {
[self initRotationDictionary];
}
return rotationDictionary;
}
+ (NSDictionary<NSNumber *, NSDictionary<NSNumber *, NSNumber *> *> *) getTranslationDictionary
{
if (translationDictionary == nil) {
[self initTranslationDictionary];
}
return translationDictionary;
}
# pragma mark - Initialize dictionaries
// If you wonder why this dictionary is half-empty, see comment inside `- (CGFloat)rotation`. It may help you.
+ (void)initRotationDictionary
{
rotationDictionary = @{
@(AVCaptureVideoOrientationPortrait): @{
@(AVCaptureVideoOrientationLandscapeLeft) : @(M_PI_2),
@(AVCaptureVideoOrientationLandscapeRight) : @(-M_PI_2),
@(AVCaptureVideoOrientationPortraitUpsideDown) : @(M_PI),
},
@(AVCaptureVideoOrientationPortraitUpsideDown): @{
@(AVCaptureVideoOrientationLandscapeLeft) : @(-M_PI_2),
@(AVCaptureVideoOrientationLandscapeRight) : @(M_PI_2)
},
@(AVCaptureVideoOrientationLandscapeRight): @{
@(AVCaptureVideoOrientationLandscapeLeft) : @(M_PI)
}
};
}
// If you wonder why this dictionary is half-empty, see comment inside `- (CGPoint)translation`. It may help you.
+ (void)initTranslationDictionary
{
translationDictionary = @{
@(AVCaptureVideoOrientationPortrait): @{
@(AVCaptureVideoOrientationLandscapeLeft) : @(RNTranslateYNegativeWidth),
@(AVCaptureVideoOrientationLandscapeRight) : @(RNTranslateXNegativeHeight),
@(AVCaptureVideoOrientationPortraitUpsideDown) : @(RNTranslateYXNegative)
},
@(AVCaptureVideoOrientationPortraitUpsideDown): @{
@(AVCaptureVideoOrientationLandscapeLeft) : @(RNTranslateXNegativeHeight),
@(AVCaptureVideoOrientationLandscapeRight) : @(RNTranslateYNegativeWidth)
},
@(AVCaptureVideoOrientationLandscapeRight): @{
@(AVCaptureVideoOrientationLandscapeLeft) : @(RNTranslateXYNegative)
}
};
}
@end

View File

@ -0,0 +1,35 @@
//
// RNFaceDetectorUtils.h
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import <UIKit/UIKit.h>
#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <GoogleMVDataOutput/GoogleMVDataOutput.h>
typedef NS_ENUM(NSInteger, RNFaceDetectionMode) {
RNFaceDetectionFastMode = GMVDetectorFaceFastMode,
RNFaceDetectionAccurateMode = GMVDetectorFaceAccurateMode
};
typedef NS_ENUM(NSInteger, RNFaceDetectionLandmarks) {
RNFaceDetectAllLandmarks = GMVDetectorFaceLandmarkAll,
RNFaceDetectNoLandmarks = GMVDetectorFaceLandmarkNone
};
typedef NS_ENUM(NSInteger, RNFaceDetectionClassifications) {
RNFaceRunAllClassifications = GMVDetectorFaceClassificationAll,
RNFaceRunNoClassifications = GMVDetectorFaceClassificationNone
};
@interface RNFaceDetectorUtils : NSObject
+ (NSDictionary *)constantsToExport;
+ (CGAffineTransform)transformFromDeviceOutput:(GMVDataOutput *)dataOutput toInterfaceVideoOrientation:(AVCaptureVideoOrientation)interfaceVideoOrientation;
@end

View File

@ -0,0 +1,77 @@
//
// RNFaceDetectorUtils.m
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import "RNCameraUtils.h"
#import "RNFaceDetectorUtils.h"
#import "RNFaceDetectorPointTransformCalculator.h"
NSString *const RNGMVDataOutputWidthKey = @"Width";
NSString *const RNGMVDataOutputHeightKey = @"Height";
@implementation RNFaceDetectorUtils
+ (NSDictionary *)constantsToExport
{
return @{
@"Mode" : @{
@"fast" : @(RNFaceDetectionFastMode),
@"accurate" : @(RNFaceDetectionAccurateMode)
},
@"Landmarks" : @{
@"all" : @(RNFaceDetectAllLandmarks),
@"none" : @(RNFaceDetectNoLandmarks)
},
@"Classifications" : @{
@"all" : @(RNFaceRunAllClassifications),
@"none" : @(RNFaceRunNoClassifications)
}
};
}
# pragma mark - GMVDataOutput transformations
+ (CGAffineTransform)transformFromDeviceVideoOrientation:(AVCaptureVideoOrientation)deviceVideoOrientation toInterfaceVideoOrientation:(AVCaptureVideoOrientation)interfaceVideoOrientation videoWidth:(NSNumber *)width videoHeight:(NSNumber *)height
{
RNFaceDetectorPointTransformCalculator *calculator = [[RNFaceDetectorPointTransformCalculator alloc] initToTransformFromOrientation:deviceVideoOrientation toOrientation:interfaceVideoOrientation forVideoWidth:[width floatValue] andVideoHeight:[height floatValue]];
return [calculator transform];
}
// Normally we would use `dataOutput.xScale`, `.yScale` and `.offset`.
// Unfortunately, it turns out that using these attributes results in different results
// on iPhone {6, 7} and iPhone 5S. On newer iPhones the transform works properly,
// whereas on iPhone 5S the scale is too big (~0.7, while it should be ~0.4) and the offset
// moves the face points away. This workaround (using screen + orientation + video resolution
// to calculate a proper scale) has been proven to work on all three devices (a worked numeric
// example follows the method below).
+ (CGAffineTransform)transformFromDeviceOutput:(GMVDataOutput *)dataOutput withInterfaceOrientation:(AVCaptureVideoOrientation)interfaceVideoOrientation
{
UIScreen *mainScreen = [UIScreen mainScreen];
BOOL interfaceIsLandscape = interfaceVideoOrientation == AVCaptureVideoOrientationLandscapeLeft || interfaceVideoOrientation == AVCaptureVideoOrientationLandscapeRight;
CGFloat interfaceWidth = interfaceIsLandscape ? mainScreen.bounds.size.height : mainScreen.bounds.size.width;
CGFloat interfaceHeight = interfaceIsLandscape ? mainScreen.bounds.size.width : mainScreen.bounds.size.height;
CGFloat xScale = interfaceWidth / [(NSNumber *)dataOutput.videoSettings[RNGMVDataOutputHeightKey] floatValue];
CGFloat yScale = interfaceHeight / [(NSNumber *)dataOutput.videoSettings[RNGMVDataOutputWidthKey] floatValue];
CGAffineTransform dataOutputTransform = CGAffineTransformIdentity;
dataOutputTransform = CGAffineTransformScale(dataOutputTransform, xScale, yScale);
return dataOutputTransform;
}
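// Hypothetical numbers for illustration (not from the original source): with a portrait interface
// on a 375x667-point screen and videoSettings reporting Width = 1920 and Height = 1080,
//   xScale = 375 / 1080 ~= 0.35
//   yScale = 667 / 1920 ~= 0.35
// i.e. the scale comes from screen points and video resolution rather than from
// dataOutput.xScale / dataOutput.yScale, which behaved inconsistently across devices (see above).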
+ (CGAffineTransform)transformFromDeviceOutput:(GMVDataOutput *)dataOutput toInterfaceVideoOrientation:(AVCaptureVideoOrientation)interfaceVideoOrientation
{
UIDeviceOrientation currentDeviceOrientation = [[UIDevice currentDevice] orientation];
AVCaptureVideoOrientation deviceVideoOrientation = [RNCameraUtils videoOrientationForDeviceOrientation:currentDeviceOrientation];
NSNumber *videoWidth = dataOutput.videoSettings[RNGMVDataOutputWidthKey];
NSNumber *videoHeight = dataOutput.videoSettings[RNGMVDataOutputHeightKey];
CGAffineTransform interfaceTransform = [self transformFromDeviceVideoOrientation:deviceVideoOrientation toInterfaceVideoOrientation:interfaceVideoOrientation videoWidth:videoWidth videoHeight:videoHeight];
CGAffineTransform dataOutputTransform = [self transformFromDeviceOutput:dataOutput withInterfaceOrientation:interfaceVideoOrientation];
return CGAffineTransformConcat(interfaceTransform, dataOutputTransform);
}
@end

View File

@ -0,0 +1,17 @@
//
// RNFaceEncoder.h
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import <UIKit/UIKit.h>
#import <GoogleMobileVision/GoogleMobileVision.h>
@interface RNFaceEncoder : NSObject
- (instancetype)initWithTransform:(CGAffineTransform)transform;
- (NSDictionary *)encode:(GMVFaceFeature *)face;
@end

View File

@ -0,0 +1,119 @@
//
// RNFaceEncoder.m
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import "RNFaceEncoder.h"
#define cDefaultFloatComparisonEpsilon 0.0001
#define cModEqualFloatsWithEpsilon(dividend, divisor, modulo, epsilon) \
fabs( fmod(dividend, divisor) - modulo ) < epsilon
#define cModEqualFloats(dividend, divisor, modulo) \
cModEqualFloatsWithEpsilon(dividend, divisor, modulo, cDefaultFloatComparisonEpsilon)
@interface RNFaceEncoder()
@property (assign, nonatomic) BOOL swapWidthAndHeight;
@property (assign, nonatomic) CGAffineTransform transform;
@property (assign, nonatomic) CGFloat rollAngleDegreesFromTransform;
@end
@implementation RNFaceEncoder
- (instancetype)init
{
return [self initWithTransform:CGAffineTransformIdentity];
}
- (instancetype)initWithTransform:(CGAffineTransform)transform
{
self = [super init];
if (self) {
_transform = transform;
_rollAngleDegreesFromTransform = [self radianAngleToDegrees:[self rollAngleFromTransform:_transform]];
_swapWidthAndHeight = cModEqualFloats(_rollAngleDegreesFromTransform + 360, 180, 90);
}
return self;
}
- (NSDictionary *)encode:(GMVFaceFeature *)face
{
CGRect bounds = CGRectApplyAffineTransform(face.bounds, _transform);
NSDictionary *initialDictionary = @{
@"bounds" : @{
@"size" : @{
@"width" : @(_swapWidthAndHeight ? bounds.size.height : bounds.size.width),
@"height" : @(_swapWidthAndHeight ? bounds.size.width : bounds.size.height)
},
@"origin" : @{
@"x" : @(bounds.origin.x),
@"y" : @(bounds.origin.y)
}
}
};
NSMutableDictionary *encodedFace = [[NSMutableDictionary alloc] initWithDictionary:initialDictionary];
[self putAFloat:face.smilingProbability forKey:@"smilingProbability" toDictionary:encodedFace ifValueIsValid:face.hasSmilingProbability];
[self putAnInteger:face.trackingID forKey:@"faceID" toDictionary:encodedFace ifValueIsValid:face.hasTrackingID];
[self putAPoint:face.leftEarPosition forKey:@"leftEarPosition" toDictionary:encodedFace ifValueIsValid:face.hasLeftEarPosition];
[self putAPoint:face.rightEarPosition forKey:@"rightEarPosition" toDictionary:encodedFace ifValueIsValid:face.hasRightEarPosition];
[self putAPoint:face.leftEyePosition forKey:@"leftEyePosition" toDictionary:encodedFace ifValueIsValid:face.hasLeftEyePosition];
[self putAFloat:face.leftEyeOpenProbability forKey:@"leftEyeOpenProbability" toDictionary:encodedFace ifValueIsValid:face.hasLeftEyeOpenProbability];
[self putAPoint:face.rightEyePosition forKey:@"rightEyePosition" toDictionary:encodedFace ifValueIsValid:face.hasRightEyePosition];
[self putAFloat:face.rightEyeOpenProbability forKey:@"rightEyeOpenProbability" toDictionary:encodedFace ifValueIsValid:face.hasRightEyeOpenProbability];
[self putAPoint:face.leftCheekPosition forKey:@"leftCheekPosition" toDictionary:encodedFace ifValueIsValid:face.hasLeftCheekPosition];
[self putAPoint:face.rightCheekPosition forKey:@"rightCheekPosition" toDictionary:encodedFace ifValueIsValid:face.hasRightCheekPosition];
[self putAPoint:face.leftMouthPosition forKey:@"leftMouthPosition" toDictionary:encodedFace ifValueIsValid:face.hasLeftMouthPosition];
[self putAPoint:face.mouthPosition forKey:@"mouthPosition" toDictionary:encodedFace ifValueIsValid:face.hasMouthPosition];
[self putAPoint:face.rightMouthPosition forKey:@"rightMouthPosition" toDictionary:encodedFace ifValueIsValid:face.hasRightMouthPosition];
[self putAPoint:face.bottomMouthPosition forKey:@"bottomMouthPosition" toDictionary:encodedFace ifValueIsValid:face.hasBottomMouthPosition];
[self putAPoint:face.noseBasePosition forKey:@"noseBasePosition" toDictionary:encodedFace ifValueIsValid:face.hasNoseBasePosition];
[self putAFloat:face.headEulerAngleY forKey:@"yawAngle" toDictionary:encodedFace ifValueIsValid:face.hasHeadEulerAngleY];
[self putAFloat:-(face.headEulerAngleZ - _rollAngleDegreesFromTransform) forKey:@"rollAngle" toDictionary:encodedFace ifValueIsValid:face.hasHeadEulerAngleZ];
return encodedFace;
}
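// Shape of the dictionary produced above when landmarks and classifications are enabled
// (values are made up for illustration; optional keys appear only when GMV reports them):
//   @{
//     @"bounds" : @{ @"origin" : @{ @"x" : @64,  @"y" : @128 },
//                    @"size"   : @{ @"width" : @240, @"height" : @240 } },
//     @"faceID" : @1,
//     @"rollAngle" : @2.5,
//     @"yawAngle" : @-7.1,
//     @"smilingProbability" : @0.93,
//     @"leftEyePosition" : @{ @"x" : @120, @"y" : @180 },
//     // ...other landmark points and eye-open probabilities follow the same pattern
//   }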
- (void)putAPoint:(CGPoint)point forKey:(NSString *)key toDictionary:(NSMutableDictionary *)dictionary ifValueIsValid:(BOOL)pointIsValid
{
if (pointIsValid) {
CGPoint transformedPoint = CGPointApplyAffineTransform(point, _transform);
[dictionary setObject:@{ @"x" : @(transformedPoint.x), @"y" : @(transformedPoint.y) } forKey:key];
}
}
- (void)putAFloat:(CGFloat)value forKey:(NSString *)key toDictionary:(NSMutableDictionary *)dictionary ifValueIsValid:(BOOL)floatIsValid
{
if (floatIsValid) {
[dictionary setObject:@(value) forKey:key];
}
}
- (void)putAnInteger:(NSUInteger)value forKey:(NSString *)key toDictionary:(NSMutableDictionary *)dictionary ifValueIsValid:(BOOL)integerIsValid
{
if (integerIsValid) {
[dictionary setObject:@(value) forKey:key];
}
}
- (CGFloat)rollAngleFromTransform:(CGAffineTransform)transform
{
return atan2f(transform.b, transform.a);
}
- (CGFloat)radianAngleToDegrees:(CGFloat)angle
{
return angle * (180 / M_PI);
}
@end

View File

@ -11,6 +11,11 @@
4107014D1ACB732B00C6AA39 /* RCTCamera.m in Sources */ = {isa = PBXBuildFile; fileRef = 410701481ACB732B00C6AA39 /* RCTCamera.m */; };
4107014E1ACB732B00C6AA39 /* RCTCameraManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 4107014A1ACB732B00C6AA39 /* RCTCameraManager.m */; };
454EBCF41B5082DC00AD0F86 /* NSMutableDictionary+ImageMetadata.m in Sources */ = {isa = PBXBuildFile; fileRef = 454EBCF31B5082DC00AD0F86 /* NSMutableDictionary+ImageMetadata.m */; };
7147DBB32015319E003C59C3 /* RNFaceDetectorManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 7147DBB22015319E003C59C3 /* RNFaceDetectorManager.m */; };
7147DBB620155340003C59C3 /* RNFaceDetectorModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 7147DBB520155340003C59C3 /* RNFaceDetectorModule.m */; };
7147DBB9201553EE003C59C3 /* RNFaceDetectorPointTransformCalculator.m in Sources */ = {isa = PBXBuildFile; fileRef = 7147DBB8201553EE003C59C3 /* RNFaceDetectorPointTransformCalculator.m */; };
7147DBBC20155594003C59C3 /* RNFaceDetectorUtils.m in Sources */ = {isa = PBXBuildFile; fileRef = 7147DBBB20155594003C59C3 /* RNFaceDetectorUtils.m */; };
7147DBBF20155694003C59C3 /* RNFaceEncoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 7147DBBE20155694003C59C3 /* RNFaceEncoder.m */; };
7162BE672013EAA100FE51FF /* RNCamera.m in Sources */ = {isa = PBXBuildFile; fileRef = 71C7FFCC2013C7BF006EB75A /* RNCamera.m */; };
7162BE682013EAA400FE51FF /* RNCameraManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 71C7FFC92013C7AE006EB75A /* RNCameraManager.m */; };
71C7FFD02013C7E5006EB75A /* RNCameraUtils.m in Sources */ = {isa = PBXBuildFile; fileRef = 71C7FFCF2013C7E5006EB75A /* RNCameraUtils.m */; };
@ -40,6 +45,16 @@
410701491ACB732B00C6AA39 /* RCTCameraManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RCTCameraManager.h; sourceTree = "<group>"; };
4107014A1ACB732B00C6AA39 /* RCTCameraManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RCTCameraManager.m; sourceTree = "<group>"; };
454EBCF31B5082DC00AD0F86 /* NSMutableDictionary+ImageMetadata.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "NSMutableDictionary+ImageMetadata.m"; sourceTree = "<group>"; };
7147DBB12015319E003C59C3 /* RNFaceDetectorManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFaceDetectorManager.h; sourceTree = "<group>"; };
7147DBB22015319E003C59C3 /* RNFaceDetectorManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFaceDetectorManager.m; sourceTree = "<group>"; };
7147DBB420155340003C59C3 /* RNFaceDetectorModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFaceDetectorModule.h; sourceTree = "<group>"; };
7147DBB520155340003C59C3 /* RNFaceDetectorModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFaceDetectorModule.m; sourceTree = "<group>"; };
7147DBB7201553EE003C59C3 /* RNFaceDetectorPointTransformCalculator.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFaceDetectorPointTransformCalculator.h; sourceTree = "<group>"; };
7147DBB8201553EE003C59C3 /* RNFaceDetectorPointTransformCalculator.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFaceDetectorPointTransformCalculator.m; sourceTree = "<group>"; };
7147DBBA20155594003C59C3 /* RNFaceDetectorUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFaceDetectorUtils.h; sourceTree = "<group>"; };
7147DBBB20155594003C59C3 /* RNFaceDetectorUtils.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFaceDetectorUtils.m; sourceTree = "<group>"; };
7147DBBD20155694003C59C3 /* RNFaceEncoder.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFaceEncoder.h; sourceTree = "<group>"; };
7147DBBE20155694003C59C3 /* RNFaceEncoder.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFaceEncoder.m; sourceTree = "<group>"; };
71C7FFC82013C7AE006EB75A /* RNCameraManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNCameraManager.h; sourceTree = "<group>"; };
71C7FFC92013C7AE006EB75A /* RNCameraManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNCameraManager.m; sourceTree = "<group>"; };
71C7FFCB2013C7BF006EB75A /* RNCamera.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNCamera.h; sourceTree = "<group>"; };
@ -68,6 +83,7 @@
410701241ACB719800C6AA39 = {
isa = PBXGroup;
children = (
7147DBB02015317E003C59C3 /* FaceDetector */,
714166162013E1B600EE9FCC /* RN */,
714166152013E19D00EE9FCC /* RCT */,
410701301ACB723B00C6AA39 /* Products */,
@ -115,6 +131,23 @@
path = RN;
sourceTree = "<group>";
};
7147DBB02015317E003C59C3 /* FaceDetector */ = {
isa = PBXGroup;
children = (
7147DBB12015319E003C59C3 /* RNFaceDetectorManager.h */,
7147DBB22015319E003C59C3 /* RNFaceDetectorManager.m */,
7147DBB420155340003C59C3 /* RNFaceDetectorModule.h */,
7147DBB520155340003C59C3 /* RNFaceDetectorModule.m */,
7147DBB7201553EE003C59C3 /* RNFaceDetectorPointTransformCalculator.h */,
7147DBB8201553EE003C59C3 /* RNFaceDetectorPointTransformCalculator.m */,
7147DBBA20155594003C59C3 /* RNFaceDetectorUtils.h */,
7147DBBB20155594003C59C3 /* RNFaceDetectorUtils.m */,
7147DBBD20155694003C59C3 /* RNFaceEncoder.h */,
7147DBBE20155694003C59C3 /* RNFaceEncoder.m */,
);
path = FaceDetector;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
@ -172,11 +205,16 @@
files = (
0314E39D1B661A460092D183 /* CameraFocusSquare.m in Sources */,
454EBCF41B5082DC00AD0F86 /* NSMutableDictionary+ImageMetadata.m in Sources */,
7147DBBC20155594003C59C3 /* RNFaceDetectorUtils.m in Sources */,
71C7FFD62013C824006EB75A /* RNFileSystem.m in Sources */,
7147DBB9201553EE003C59C3 /* RNFaceDetectorPointTransformCalculator.m in Sources */,
4107014E1ACB732B00C6AA39 /* RCTCameraManager.m in Sources */,
4107014D1ACB732B00C6AA39 /* RCTCamera.m in Sources */,
7147DBBF20155694003C59C3 /* RNFaceEncoder.m in Sources */,
71C7FFD02013C7E5006EB75A /* RNCameraUtils.m in Sources */,
7162BE682013EAA400FE51FF /* RNCameraManager.m in Sources */,
7147DBB32015319E003C59C3 /* RNFaceDetectorManager.m in Sources */,
7147DBB620155340003C59C3 /* RNFaceDetectorModule.m in Sources */,
7162BE672013EAA100FE51FF /* RNCamera.m in Sources */,
9FE592B31CA3CBF500788287 /* RCTSensorOrientationChecker.m in Sources */,
71C7FFD32013C817006EB75A /* RNImageUtils.m in Sources */,
@ -217,6 +255,10 @@
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
FRAMEWORK_SEARCH_PATHS = (
"${BUILT_PRODUCTS_DIR}/**",
"$(PROJECT_DIR)/../../../ios/Pods/**",
);
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_OPTIMIZATION_LEVEL = 0;
@ -231,8 +273,9 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
HEADER_SEARCH_PATHS = "";
HEADER_SEARCH_PATHS = "$(PROJECT_DIR)/../../../ios/Pods/**";
IPHONEOS_DEPLOYMENT_TARGET = 8.2;
LIBRARY_SEARCH_PATHS = "$(PROJECT_DIR)/../../../ios/Pods/**";
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
OTHER_LDFLAGS = "-ObjC";
@ -262,6 +305,10 @@
COPY_PHASE_STRIP = NO;
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
FRAMEWORK_SEARCH_PATHS = (
"${BUILT_PRODUCTS_DIR}/**",
"$(PROJECT_DIR)/../../../ios/Pods/**",
);
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
@ -269,8 +316,9 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
HEADER_SEARCH_PATHS = "";
HEADER_SEARCH_PATHS = "$(PROJECT_DIR)/../../../ios/Pods/**";
IPHONEOS_DEPLOYMENT_TARGET = 8.2;
LIBRARY_SEARCH_PATHS = "$(PROJECT_DIR)/../../../ios/Pods/**";
MTL_ENABLE_DEBUG_INFO = NO;
OTHER_LDFLAGS = "-ObjC";
PRODUCT_NAME = "$(TARGET_NAME)";

View File

@ -3,17 +3,11 @@
#import <React/RCTBridgeModule.h>
#import <UIKit/UIKit.h>
#import "RNCamera.h"
//#if __has_include("EXFaceDetectorManager.h")
//#import "EXFaceDetectorManager.h"
//#else
//#import "EXFaceDetectorManagerStub.h"
//#endif
#import "RNFaceDetectorManager.h"
@class RNCamera;
//@interface CandidateRCTCamera : UIView <AVCaptureMetadataOutputObjectsDelegate, AVCaptureFileOutputRecordingDelegate, EXFaceDetectorDelegate>
@interface RNCamera : UIView <AVCaptureMetadataOutputObjectsDelegate, AVCaptureFileOutputRecordingDelegate>
@interface RNCamera : UIView <AVCaptureMetadataOutputObjectsDelegate, AVCaptureFileOutputRecordingDelegate, RNFaceDetectorDelegate>
@property(nonatomic, strong) dispatch_queue_t sessionQueue;
@property(nonatomic, strong) AVCaptureSession *session;

View File

@ -17,7 +17,7 @@
@property (nonatomic, strong) RCTPromiseResolveBlock videoRecordedResolve;
@property (nonatomic, strong) RCTPromiseRejectBlock videoRecordedReject;
@property (nonatomic, strong) id faceDetectorManager;
@property (nonatomic, strong) RNFaceDetectorManager *faceDetectorManager;
@property (nonatomic, copy) RCTDirectEventBlock onCameraReady;
@property (nonatomic, copy) RCTDirectEventBlock onMountError;
@ -287,22 +287,22 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
- (void)updateFaceDetecting:(id)faceDetecting
{
// [_faceDetectorManager setIsEnabled:faceDetecting];
[_faceDetectorManager setIsEnabled:faceDetecting];
}
- (void)updateFaceDetectionMode:(id)requestedMode
{
// [_faceDetectorManager setMode:requestedMode];
[_faceDetectorManager setMode:requestedMode];
}
- (void)updateFaceDetectionLandmarks:(id)requestedLandmarks
{
// [_faceDetectorManager setLandmarksDetected:requestedLandmarks];
[_faceDetectorManager setLandmarksDetected:requestedLandmarks];
}
- (void)updateFaceDetectionClassifications:(id)requestedClassifications
{
// [_faceDetectorManager setClassificationsDetected:requestedClassifications];
[_faceDetectorManager setClassificationsDetected:requestedClassifications];
}
- (void)takePicture:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject
@ -365,7 +365,7 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
// At the time of writing AVCaptureMovieFileOutput and AVCaptureVideoDataOutput (> GMVDataOutput)
// cannot coexist on the same AVSession (see: https://stackoverflow.com/a/4986032/1123156).
// We stop face detection here and restart it in when AVCaptureMovieFileOutput finishes recording.
// [_faceDetectorManager stopFaceDetection];
[_faceDetectorManager stopFaceDetection];
[self setupMovieFileCapture];
}
@ -426,7 +426,7 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
self.stillImageOutput = stillImageOutput;
}
// [_faceDetectorManager maybeStartFaceDetectionOnSession:_session withPreviewLayer:_previewLayer];
[_faceDetectorManager maybeStartFaceDetectionOnSession:_session withPreviewLayer:_previewLayer];
[self setupOrDisableBarcodeScanner];
__weak RNCamera *weakSelf = self;
@ -452,7 +452,7 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
return;
#endif
dispatch_async(self.sessionQueue, ^{
// [_faceDetectorManager stopFaceDetection];
[_faceDetectorManager stopFaceDetection];
[self.previewLayer removeFromSuperlayer];
[self.session commitConfiguration];
[self.session stopRunning];
@ -707,7 +707,7 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
[self cleanupMovieFileCapture];
// If face detection has been running prior to recording to file
// we reenable it here (see comment in -record).
// [_faceDetectorManager maybeStartFaceDetectionOnSession:_session withPreviewLayer:_previewLayer];
[_faceDetectorManager maybeStartFaceDetectionOnSession:_session withPreviewLayer:_previewLayer];
if (self.session.sessionPreset != AVCaptureSessionPresetHigh) {
[self updateSessionPreset:AVCaptureSessionPresetHigh];
@ -716,18 +716,9 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
# pragma mark - Face detector
- (id)createFaceDetectorManager
- (RNFaceDetectorManager*)createFaceDetectorManager
{
// Class faceDetectorManagerClass = NSClassFromString(@"EXFaceDetectorManager"); // bad
// Class faceDetectorManagerStubClass = NSClassFromString(@"EXFaceDetectorManagerStub"); // bad
//
// if (faceDetectorManagerClass) {
// return [[faceDetectorManagerClass alloc] initWithSessionQueue:_sessionQueue delegate:self];
// } else if (faceDetectorManagerStubClass) {
// return [[faceDetectorManagerStubClass alloc] init];
// }
return nil;
return [[RNFaceDetectorManager alloc] initWithSessionQueue:_sessionQueue delegate:self];
}
- (void)onFacesDetected:(NSArray<NSDictionary *> *)faces

View File

@ -9,19 +9,14 @@
#import <React/RCTUtils.h>
#import <React/UIView+React.h>
//#if __has_include("EXFaceDetectorManager.h")
//#import "EXFaceDetectorManager.h"
//#else
//#import "EXFaceDetectorManagerStub.h"
//#endif
@implementation RNCameraManager
RCT_EXPORT_MODULE(ReactNativeCameraManager);
RCT_EXPORT_MODULE(RNCameraManager);
//RCT_EXPORT_MODULE(RNCamera);
RCT_EXPORT_VIEW_PROPERTY(onCameraReady, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onMountError, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onBarCodeRead, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onFacesDetected, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onFaceDetected, RCTDirectEventBlock);
+ (BOOL)requiresMainQueueSetup
{
@ -61,15 +56,14 @@ RCT_EXPORT_VIEW_PROPERTY(onFacesDetected, RCTDirectEventBlock);
@"480p": @(RNCameraVideo4x3),
@"4:3": @(RNCameraVideo4x3),
},
@"BarCodeType" : [[self class] validBarCodeTypes]
// @"FaceDetection" : [[self class] faceDetectorConstants]
@"BarCodeType" : [[self class] validBarCodeTypes],
@"FaceDetection" : [[self class] faceDetectorConstants]
};
}
- (NSArray<NSString *> *)supportedEvents
{
// return @[@"onCameraReady", @"onMountError", @"onBarCodeRead", @"onFacesDetected"];
return @[@"onCameraReady", @"onMountError", @"onBarCodeRead"];
return @[@"onCameraReady", @"onMountError", @"onBarCodeRead", @"onFaceDetected"];
}
+ (NSDictionary *)validBarCodeTypes
@ -93,13 +87,7 @@ RCT_EXPORT_VIEW_PROPERTY(onFacesDetected, RCTDirectEventBlock);
+ (NSDictionary *)faceDetectorConstants
{
//#if __has_include("EXFaceDetectorManager.h")
// return [EXFaceDetectorManager constants];
//#elif __has_include("EXFaceDetectorManagerStub.h")
// return [EXFaceDetectorManagerStub constants];
//#endif
return nil;
return [RNFaceDetectorManager constants];
}
RCT_CUSTOM_VIEW_PROPERTY(type, NSInteger, RNCamera)
@ -174,7 +162,6 @@ RCT_CUSTOM_VIEW_PROPERTY(barCodeTypes, NSArray, RNCamera)
RCT_REMAP_METHOD(takePicture,
options:(NSDictionary *)options
reactTag:(nonnull NSNumber *)reactTag
resolver:(RCTPromiseResolveBlock)resolve
rejecter:(RCTPromiseRejectBlock)reject)
{
@ -192,9 +179,15 @@ RCT_REMAP_METHOD(takePicture,
}
resolve(response);
#else
[self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, RNCamera *> *viewRegistry) {
RNCamera *view = viewRegistry[reactTag];
if (![view isKindOfClass:[RNCamera class]]) {
[self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, UIView *> *viewRegistry) {
RNCamera *view = nil;
for (NSNumber *reactTag in viewRegistry) {
UIView *reactView = viewRegistry[reactTag];
if ([reactView isKindOfClass:[RNCamera class]]) {
view = (RNCamera *)reactView;
}
}
if (!view) {
RCTLogError(@"Invalid view returned from registry, expecting RNCamera, got: %@", view);
} else {
[view takePicture:options resolve:resolve reject:reject];