Merge pull request #1341 from brunolemos/fix/face-feature-optional

[iOS] Make GoogleMobileVision framework optional
Sibelius Seraphini 2018-03-23 09:29:05 -03:00 committed by GitHub
commit fb501ab767
12 changed files with 59 additions and 13 deletions


@@ -84,8 +84,9 @@ pod 'react-native-camera', path: '../node_modules/react-native-camera'

 1. `npm install react-native-camera --save`
 2. In XCode, in the project navigator, right click `Libraries` ➜ `Add Files to [your project's name]`
 3. Go to `node_modules` ➜ `react-native-camera` and add `RNCamera.xcodeproj`
-4. In XCode, in the project navigator, select your project. Add `libRNCamera.a` to your project's `Build Phases` ➜ `Link Binary With Libraries`
-5. Click `RNCamera.xcodeproj` in the project navigator and go to the `Build Settings` tab. Make sure 'All' is toggled on (instead of 'Basic'). In the `Search Paths` section, look for `Header Search Paths` and make sure it contains both `$(SRCROOT)/../../react-native/React` and `$(SRCROOT)/../../../React` - mark both as `recursive`.
+4. Expand the `RNCamera.xcodeproj` ➜ `Products` folder
+5. In XCode, in the project navigator, select your project. Add `libRNCamera.a` to your project's `Build Phases` ➜ `Link Binary With Libraries`
+6. Click `RNCamera.xcodeproj` in the project navigator and go to the `Build Settings` tab. Make sure 'All' is toggled on (instead of 'Basic'). In the `Search Paths` section, look for `Header Search Paths` and make sure it contains both `$(SRCROOT)/../../react-native/React` and `$(SRCROOT)/../../../React` - mark both as `recursive`.

 ### Face Detection or Text Recognition Steps
@@ -150,7 +151,7 @@ Google Symbol Utilities: https://www.gstatic.com/cpdc/dbffca986f6337f8-GoogleSym

 #### Android

 1. `npm install react-native-camera --save`
-2. Open up `android/app/src/main/java/[...]/MainApplication.java
+2. Open up `android/app/src/main/java/[...]/MainApplication.java`
   - Add `import org.reactnative.camera.RNCameraPackage;` to the imports at the top of the file
   - Add `new RNCameraPackage()` to the list returned by the `getPackages()` method. Add a comma to the previous item if there's already something there.
@@ -164,8 +165,17 @@ Google Symbol Utilities: https://www.gstatic.com/cpdc/dbffca986f6337f8-GoogleSym

 4. Insert the following lines inside the dependencies block in `android/app/build.gradle`:

 ```gradle
-compile project(':react-native-camera')
+compile (project(':react-native-camera')) {
+  exclude group: "com.google.android.gms"
+  compile 'com.android.support:exifinterface:25.+'
+  compile ('com.google.android.gms:play-services-vision:10.2.0') {
+    force = true
+  }
+}
 ```
+
+> You may need to use different versions, e.g. `27.+` instead of `25.+` and `11.8.0` instead of `10.2.0`.

 5. Declare the permissions in your Android Manifest (required for `video recording` feature)

 ```java


@@ -8,13 +8,17 @@

 #import <Foundation/Foundation.h>
 #import <AVFoundation/AVFoundation.h>
+#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
 #import <GoogleMobileVision/GoogleMobileVision.h>
 #import <GoogleMVDataOutput/GoogleMVDataOutput.h>
+#endif

 @protocol RNFaceDetectorDelegate
 - (void)onFacesDetected:(NSArray<NSDictionary *> *)faces;
 @end

+#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
 @interface RNFaceDetectorManager : NSObject

 - (NSDictionary *)constantsToExport;

@@ -31,3 +35,4 @@
 - (void)stopFaceDetection;

 @end
+#endif
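
The idiom applied across these headers is a compile-time availability check: both the GoogleMobileVision imports and every declaration that mentions a GoogleMobileVision type are wrapped in `__has_include`, so the file still compiles when the framework is not in the project. A minimal sketch of the same pattern, using a hypothetical `OptionalKit` framework and `MyDetector` class (illustrative names, not from this repository):

```objectivec
#import <Foundation/Foundation.h>

// Compile the optional import, and any interface that uses its types,
// only when the framework's header can actually be found.
#if __has_include(<OptionalKit/OptionalKit.h>)
#import <OptionalKit/OptionalKit.h>

@interface MyDetector : NSObject
// OKFeature comes from the optional framework, so this declaration
// has to live inside the guard as well.
- (NSDictionary *)encodeFeature:(OKFeature *)feature;
@end

#endif
```

Consumers that might be built without the framework then look the class up at runtime instead of referencing it directly, which is the pattern used in the RNCamera hunks further down.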


@@ -5,6 +5,7 @@
 //  Created by Joao Guilherme Daros Fidelis on 21/01/18.
 //

+#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
 #import <React/RCTConvert.h>
 #import "RNCamera.h"
 #import "RNFaceEncoder.h"

@@ -272,3 +273,4 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
 }

 @end
+#endif


@@ -6,7 +6,9 @@
 //

 #import <React/RCTBridgeModule.h>
+#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
 #import <GoogleMobileVision/GoogleMobileVision.h>
+#endif

 @interface RNFaceDetectorModule : NSObject <RCTBridgeModule>

 @end


@@ -5,6 +5,7 @@
 //  Created by Joao Guilherme Daros Fidelis on 21/01/18.
 //

+#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
 #import "RNFaceDetectorModule.h"
 #import "RNFaceEncoder.h"
 #import "RNFileSystem.h"

@@ -193,3 +194,4 @@ RCT_EXPORT_METHOD(detectFaces:(nonnull NSDictionary *)options
 }

 @end
+#endif


@@ -5,6 +5,7 @@
 //  Created by Joao Guilherme Daros Fidelis on 21/01/18.
 //

+#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
 #import <UIKit/UIKit.h>
 #import <CoreMedia/CoreMedia.h>
 #import <Foundation/Foundation.h>

@@ -33,3 +34,4 @@ typedef NS_ENUM(NSInteger, RNFaceDetectionClassifications) {
 + (CGAffineTransform)transformFromDeviceOutput:(GMVDataOutput *)dataOutput toInterfaceVideoOrientation:(AVCaptureVideoOrientation)interfaceVideoOrientation;

 @end
+#endif


@@ -5,6 +5,7 @@
 //  Created by Joao Guilherme Daros Fidelis on 21/01/18.
 //

+#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
 #import "RNCameraUtils.h"
 #import "RNFaceDetectorUtils.h"
 #import "RNFaceDetectorPointTransformCalculator.h"

@@ -75,3 +76,4 @@ NSString *const RNGMVDataOutputHeightKey = @"Height";
 }

 @end
+#endif


@@ -6,6 +6,7 @@
 //

 #import <UIKit/UIKit.h>
+#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
 #import <GoogleMobileVision/GoogleMobileVision.h>

 @interface RNFaceEncoder : NSObject

@@ -15,3 +16,4 @@
 - (NSDictionary *)encode:(GMVFaceFeature *)face;

 @end
+#endif


@@ -7,6 +7,7 @@

 #import "RNFaceEncoder.h"

+#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
 #define cDefaultFloatComparisonEpsilon 0.0001

 #define cModEqualFloatsWithEpsilon(dividend, divisor, modulo, epsilon) \
 fabs( fmod(dividend, divisor) - modulo ) < epsilon

@@ -117,3 +118,4 @@ cModEqualFloatsWithEpsilon(dividend, divisor, modulo, cDefaultFloatComparisonEpsilon)
 }

 @end
+#endif
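
For context, the `cModEqualFloatsWithEpsilon` macro that this guard now wraps is a tolerance-based check that `fmod(dividend, divisor)` equals `modulo`. A small self-contained illustration of a call, with made-up angle values (in degrees) chosen for the example:

```objectivec
#import <Foundation/Foundation.h>
#include <math.h>

// Macro copied from the diff above: true when fmod(dividend, divisor)
// equals modulo to within epsilon.
#define cDefaultFloatComparisonEpsilon 0.0001
#define cModEqualFloatsWithEpsilon(dividend, divisor, modulo, epsilon) \
fabs( fmod(dividend, divisor) - modulo ) < epsilon

int main(void) {
    // 270 is an exact multiple of 90, so the remainder is 0 within tolerance.
    BOOL quarterTurns = cModEqualFloatsWithEpsilon(270.0, 90.0, 0, cDefaultFloatComparisonEpsilon);
    // 300 mod 90 is 30, not 0, so this check is NO.
    BOOL notQuarterTurns = cModEqualFloatsWithEpsilon(300.0, 90.0, 0, cDefaultFloatComparisonEpsilon);
    NSLog(@"%d %d", quarterTurns, notQuarterTurns); // logs "1 0"
    return 0;
}
```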


@@ -284,6 +284,7 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
 [device unlockForConfiguration];
 }

+#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
 - (void)updateFaceDetecting:(id)faceDetecting
 {
 [_faceDetectorManager setIsEnabled:faceDetecting];

@@ -303,6 +304,7 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
 {
 [_faceDetectorManager setClassificationsDetected:requestedClassifications];
 }
+#endif

 - (void)takePicture:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject
 {

@@ -382,7 +384,9 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
 // At the time of writing AVCaptureMovieFileOutput and AVCaptureVideoDataOutput (> GMVDataOutput)
 // cannot coexist on the same AVSession (see: https://stackoverflow.com/a/4986032/1123156).
 // We stop face detection here and restart it when AVCaptureMovieFileOutput finishes recording.
+#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
 [_faceDetectorManager stopFaceDetection];
+#endif
 [self setupMovieFileCapture];
 }

@@ -457,7 +461,9 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
 self.stillImageOutput = stillImageOutput;
 }

+#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
 [_faceDetectorManager maybeStartFaceDetectionOnSession:_session withPreviewLayer:_previewLayer];
+#endif
 [self setupOrDisableBarcodeScanner];

 __weak RNCamera *weakSelf = self;

@@ -483,7 +489,9 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
 return;
 #endif
 dispatch_async(self.sessionQueue, ^{
+#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
 [_faceDetectorManager stopFaceDetection];
+#endif
 [self.previewLayer removeFromSuperlayer];
 [self.session commitConfiguration];
 [self.session stopRunning];

@@ -742,9 +750,12 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
 self.videoCodecType = nil;
 [self cleanupMovieFileCapture];

+#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
 // If face detection has been running prior to recording to file
 // we reenable it here (see comment in -record).
 [_faceDetectorManager maybeStartFaceDetectionOnSession:_session withPreviewLayer:_previewLayer];
+#endif

 if (self.session.sessionPreset != AVCaptureSessionPresetHigh) {
 [self updateSessionPreset:AVCaptureSessionPresetHigh];

@@ -758,11 +769,13 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
 Class faceDetectorManagerClass = NSClassFromString(@"RNFaceDetectorManager");
 Class faceDetectorManagerStubClass = NSClassFromString(@"RNFaceDetectorManagerStub");

+#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
 if (faceDetectorManagerClass) {
 return [[faceDetectorManagerClass alloc] initWithSessionQueue:_sessionQueue delegate:self];
 } else if (faceDetectorManagerStubClass) {
 return [[faceDetectorManagerStubClass alloc] init];
 }
+#endif

 return nil;
 }
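
The last hunk above resolves the detector classes with `NSClassFromString`, so the camera has no link-time dependency on either class and simply returns `nil` when GoogleMobileVision is compiled out. The `RNFaceDetectorManagerStub` fallback itself is not part of this diff; a hypothetical no-op stub consistent with the calls guarded above (`setIsEnabled:`, `maybeStartFaceDetectionOnSession:withPreviewLayer:`, `stopFaceDetection`) could look roughly like this:

```objectivec
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>

// Hypothetical no-op stand-in. It answers the same messages the camera sends
// to the real face detector manager, so call sites keep working when
// GoogleMobileVision is unavailable. The real stub's interface is not shown
// in this diff.
@interface RNFaceDetectorManagerStub : NSObject
- (void)setIsEnabled:(id)enabled;
- (void)maybeStartFaceDetectionOnSession:(AVCaptureSession *)session
                        withPreviewLayer:(AVCaptureVideoPreviewLayer *)previewLayer;
- (void)stopFaceDetection;
@end

@implementation RNFaceDetectorManagerStub
- (void)setIsEnabled:(id)enabled {}
- (void)maybeStartFaceDetectionOnSession:(AVCaptureSession *)session
                        withPreviewLayer:(AVCaptureVideoPreviewLayer *)previewLayer {}
- (void)stopFaceDetection {}
@end
```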


@@ -105,11 +105,15 @@ RCT_EXPORT_VIEW_PROPERTY(onFacesDetected, RCTDirectEventBlock);

 + (NSDictionary *)faceDetectorConstants
 {
+#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
 #if __has_include("RNFaceDetectorManager.h")
 return [RNFaceDetectorManager constants];
 #else
 return [RNFaceDetectorManagerStub constants];
 #endif
+#else
+ return [NSDictionary new];
+#endif
 }

 RCT_CUSTOM_VIEW_PROPERTY(type, NSInteger, RNCamera)


@@ -125,9 +125,9 @@ export default class Camera extends React.Component<PropsType> {
 flashMode: CameraManager.FlashMode,
 autoFocus: CameraManager.AutoFocus,
 whiteBalance: CameraManager.WhiteBalance,
-faceDetectionMode: CameraManager.FaceDetection.Mode,
-faceDetectionLandmarks: CameraManager.FaceDetection.Landmarks,
-faceDetectionClassifications: CameraManager.FaceDetection.Classifications,
+faceDetectionMode: (CameraManager.FaceDetection || {}).Mode,
+faceDetectionLandmarks: (CameraManager.FaceDetection || {}).Landmarks,
+faceDetectionClassifications: (CameraManager.FaceDetection || {}).Classifications,
 };

 static propTypes = {

@@ -164,10 +164,10 @@ export default class Camera extends React.Component<PropsType> {
 autoFocus: CameraManager.AutoFocus.on,
 flashMode: CameraManager.FlashMode.off,
 whiteBalance: CameraManager.WhiteBalance.auto,
-faceDetectionMode: CameraManager.FaceDetection.fast,
+faceDetectionMode: (CameraManager.FaceDetection || {}).fast,
 barCodeTypes: Object.values(CameraManager.BarCodeType),
-faceDetectionLandmarks: CameraManager.FaceDetection.Landmarks.none,
-faceDetectionClassifications: CameraManager.FaceDetection.Classifications.none,
+faceDetectionLandmarks: ((CameraManager.FaceDetection || {}).Landmarks || {}).none,
+faceDetectionClassifications: ((CameraManager.FaceDetection || {}).Classifications || {}).none,
 permissionDialogTitle: '',
 permissionDialogMessage: '',
 notAuthorizedView: (