feat: Face Detection Orientation added

This commit is contained in:
Malte Peters 2018-02-09 13:46:21 +01:00
parent 7efbd50676
commit 00d1a5f84b
6 changed files with 110 additions and 63 deletions

View File

@ -23,7 +23,7 @@ using namespace cv;
}
- (id) init;
- (id) initWithDelegate:(id <OpenCVProcessorFaceDetectorDelegate>)delegateObj;
- (void)setExpectedFaceOrientation:(NSInteger)expectedFaceOrientation;
@end

View File

@ -5,12 +5,14 @@
@implementation OpenCVProcessor{
BOOL saveDemoFrame;
int processedFrames;
NSInteger expectedFaceOrientation;
}
- (id) init {
saveDemoFrame = false;
saveDemoFrame = true;
processedFrames = 0;
expectedFaceOrientation = -1;
NSString *path = [[NSBundle mainBundle] pathForResource:@"lbpcascade_frontalface_improved.xml"
ofType:nil];
@ -31,6 +33,11 @@
return self;
}
/// Overrides the device-orientation-based rotation used by the face detector.
/// @param expectedOrientation Orientation index (0 = portrait, 1 = landscape-left,
///        2 = portrait-upside-down, 3 = default/landscape-right — inferred from the
///        switch in processImage; TODO confirm mapping). Pass -1 to fall back to
///        [[UIDevice currentDevice] orientation] (see the `expectedFaceOrientation != -1`
///        check in the processing hunk).
- (void)setExpectedFaceOrientation:(NSInteger)expectedOrientation
{
// Stored in the class-extension ivar; read on every processed frame.
expectedFaceOrientation = expectedOrientation;
}
# pragma mark - OpenCV-Processing
#ifdef __cplusplus
@ -80,22 +87,39 @@
int orientation = 3;
//cv::equalizeHist(image, image);
// rotate image according to device-rotation
UIDeviceOrientation interfaceOrientation = [[UIDevice currentDevice] orientation];
if (interfaceOrientation == UIDeviceOrientationPortrait) {
orientation = 0;
transpose(image, image);
flip(image, image,1);
} else if (interfaceOrientation == UIDeviceOrientationPortraitUpsideDown) {
orientation = 2;
transpose(image, image);
flip(image, image,0);
} else if (interfaceOrientation == UIDeviceOrientationLandscapeLeft) {
orientation = 1;
flip(image, image,-1);
if(expectedFaceOrientation != -1){
orientation = expectedFaceOrientation;
} else {
// rotate image according to device-orientation
UIDeviceOrientation interfaceOrientation = [[UIDevice currentDevice] orientation];
if (interfaceOrientation == UIDeviceOrientationPortrait) {
orientation = 0;
} else if (interfaceOrientation == UIDeviceOrientationPortraitUpsideDown) {
orientation = 2;
} else if (interfaceOrientation == UIDeviceOrientationLandscapeLeft) {
orientation = 1;
}
}
cv::resize(image, image, cv::Size(0,0), 360./(float)image.cols, 360./(float)image.cols, cv::INTER_CUBIC);
switch(orientation){
case 0:
transpose(image, image);
flip(image, image,1);
break;
case 1:
flip(image, image,-1);
break;
case 2:
transpose(image, image);
flip(image, image,0);
break;
}
float imageWidth = 480.;
float scale = imageWidth / (float)image.cols;
float imageHeight = (float)image.rows * scale;
cv::resize(image, image, cv::Size(0,0), scale, scale, cv::INTER_CUBIC);
if(saveDemoFrame){
[self saveImageToDisk:image];
@ -104,17 +128,17 @@
objects.clear();
cascade.detectMultiScale(image,
objects,
2.0,
1.2,
3,
CV_HAAR_SCALE_IMAGE,
cv::Size(30, 30));
0,
cv::Size(10, 10));
if(objects.size() > 0){
NSMutableArray *faces = [[NSMutableArray alloc] initWithCapacity:objects.size()];
for( int i = 0; i < objects.size(); i++ )
{
cv::Rect face = objects[i];
id objects[] = { @(face.x), @(face.y), @(face.width), @(face.height), @(orientation) };
id objects[] = { [NSNumber numberWithFloat:face.x / imageWidth], [NSNumber numberWithFloat:face.y / imageHeight], [NSNumber numberWithFloat:face.width / imageWidth], [NSNumber numberWithFloat:face.height / imageHeight], @(orientation) };
id keys[] = { @"x", @"y", @"width", @"height", @"orientation" };
NSUInteger count = sizeof(objects) / sizeof(id);
NSDictionary *faceDescriptor = [NSDictionary dictionaryWithObjects:objects
@ -162,4 +186,3 @@
#endif
@end

View File

@ -41,6 +41,7 @@
- (void)updateZoom;
- (void)updateWhiteBalance;
- (void)updateFaceDetecting:(id)isDetectingFaces;
- (void)updateFaceDetectionExpectedOrientation:(NSInteger)expectedFaceOrientation;
- (void)updateFaceDetectionMode:(id)requestedMode;
- (void)updateFaceDetectionLandmarks:(id)requestedLandmarks;
- (void)updateFaceDetectionClassifications:(id)requestedClassifications;

View File

@ -288,6 +288,11 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
[device unlockForConfiguration];
}
/// Forwards the JS-side `faceDetectionExpectedOrientation` prop to the OpenCV
/// processor. -1 means "no override" (processor falls back to device orientation).
/// @param expectedFaceOrientation Orientation index forwarded unchanged to
///        -[OpenCVProcessor setExpectedFaceOrientation:].
- (void)updateFaceDetectionExpectedOrientation:(NSInteger)expectedFaceOrientation
{
[_openCVProcessor setExpectedFaceOrientation:expectedFaceOrientation];
}
- (void)updateFaceDetecting:(id)faceDetecting
{
[_faceDetectorManager setIsEnabled:faceDetecting];

View File

@ -131,6 +131,12 @@ RCT_CUSTOM_VIEW_PROPERTY(whiteBalance, NSInteger, RNCamera)
[view updateWhiteBalance];
}
// Exposes the `faceDetectionExpectedOrientation` prop (NSInteger) to React Native.
// Converts the incoming JSON value and forwards it to the camera view; the view
// relays it to the OpenCV face-detection processor. Default on the JS side is -1
// ("use device orientation") — see the JS defaultProps hunk in this commit.
RCT_CUSTOM_VIEW_PROPERTY(faceDetectionExpectedOrientation, NSInteger, RNCamera)
{
[view updateFaceDetectionExpectedOrientation:[RCTConvert NSInteger:json]];
}
RCT_CUSTOM_VIEW_PROPERTY(faceDetectorEnabled, BOOL, RNCamera)
{
[view updateFaceDetecting:json];

View File

@ -2,16 +2,16 @@
import React from 'react';
import PropTypes from 'prop-types';
import { mapValues } from 'lodash';
import {
findNodeHandle,
Platform,
NativeModules,
ViewPropTypes,
requireNativeComponent,
View,
ActivityIndicator,
import {
findNodeHandle,
Platform,
NativeModules,
ViewPropTypes,
requireNativeComponent,
View,
ActivityIndicator,
Text,
} from 'react-native';
} from 'react-native';
import type { FaceFeature } from './FaceDetector';
@ -43,6 +43,7 @@ type PropsType = ViewPropTypes & {
onCameraReady?: Function,
onBarCodeRead?: Function,
faceDetectionMode?: number,
faceDetectionExpectedOrientation?: number,
flashMode?: number | string,
barCodeTypes?: Array<string>,
whiteBalance?: number | string,
@ -53,31 +54,31 @@ type PropsType = ViewPropTypes & {
captureAudio?: boolean,
};
const CameraManager: Object =
NativeModules.RNCameraManager || NativeModules.RNCameraModule || {
stubbed: true,
Type: {
back: 1,
},
AutoFocus: {
on: 1
},
FlashMode: {
off: 1,
},
WhiteBalance: {},
BarCodeType: {},
FaceDetection: {
fast: 1,
Mode: {},
Landmarks: {
none: 0,
const CameraManager: Object = NativeModules.RNCameraManager ||
NativeModules.RNCameraModule || {
stubbed: true,
Type: {
back: 1,
},
Classifications: {
none: 0,
AutoFocus: {
on: 1,
},
},
};
FlashMode: {
off: 1,
},
WhiteBalance: {},
BarCodeType: {},
FaceDetection: {
fast: 1,
Mode: {},
Landmarks: {
none: 0,
},
Classifications: {
none: 0,
},
},
};
const EventThrottleMs = 500;
@ -113,13 +114,18 @@ export default class Camera extends React.Component<PropsType> {
onBarCodeRead: PropTypes.func,
onFacesDetected: PropTypes.func,
faceDetectionMode: PropTypes.number,
faceDetectionExpectedOrientation: PropTypes.number,
faceDetectionLandmarks: PropTypes.number,
faceDetectionClassifications: PropTypes.number,
barCodeTypes: PropTypes.arrayOf(PropTypes.string),
type: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
flashMode: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
whiteBalance: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
autoFocus: PropTypes.oneOfType([PropTypes.string, PropTypes.number, PropTypes.bool]),
autoFocus: PropTypes.oneOfType([
PropTypes.string,
PropTypes.number,
PropTypes.bool,
]),
permissionDialogTitle: PropTypes.string,
permissionDialogMessage: PropTypes.string,
notAuthorizedView: PropTypes.element,
@ -135,10 +141,12 @@ export default class Camera extends React.Component<PropsType> {
autoFocus: CameraManager.AutoFocus.on,
flashMode: CameraManager.FlashMode.off,
whiteBalance: CameraManager.WhiteBalance.auto,
faceDetectionExpectedOrientation: -1,
faceDetectionMode: CameraManager.FaceDetection.fast,
barCodeTypes: Object.values(CameraManager.BarCodeType),
faceDetectionLandmarks: CameraManager.FaceDetection.Landmarks.none,
faceDetectionClassifications: CameraManager.FaceDetection.Classifications.none,
faceDetectionClassifications:
CameraManager.FaceDetection.Classifications.none,
permissionDialogTitle: '',
permissionDialogMessage: '',
notAuthorizedView: (
@ -147,14 +155,12 @@ export default class Camera extends React.Component<PropsType> {
flex: 1,
alignItems: 'center',
justifyContent: 'center',
}}
>
}}>
<Text
style={{
textAlign: 'center',
fontSize: 16,
}}
>
}}>
Camera not authorized
</Text>
</View>
@ -165,8 +171,7 @@ export default class Camera extends React.Component<PropsType> {
flex: 1,
alignItems: 'center',
justifyContent: 'center',
}}
>
}}>
<ActivityIndicator size="small" />
</View>
),
@ -231,7 +236,9 @@ export default class Camera extends React.Component<PropsType> {
}
};
_onObjectDetected = (callback: ?Function) => ({ nativeEvent }: EventCallbackArgumentsType) => {
_onObjectDetected = (callback: ?Function) => ({
nativeEvent,
}: EventCallbackArgumentsType) => {
const { type } = nativeEvent;
if (
@ -262,7 +269,12 @@ export default class Camera extends React.Component<PropsType> {
async componentWillMount() {
const hasVideoAndAudio = this.props.captureAudio;
const isAuthorized = await requestPermissions(hasVideoAndAudio, CameraManager, this.props.permissionDialogTitle, this.props.permissionDialogMessage);
const isAuthorized = await requestPermissions(
hasVideoAndAudio,
CameraManager,
this.props.permissionDialogTitle,
this.props.permissionDialogMessage
);
this.setState({ isAuthorized, isAuthorizationChecked: true });
}