Merge remote-tracking branch 'upstream/master'

commit 1351170d73
Martin Puza, 2018-02-14 18:52:30 +01:00
121 changed files with 15759 additions and 732 deletions

45
.eslintrc Normal file

@ -0,0 +1,45 @@
{
"env": {
"browser": true,
"node": true,
"jest": true,
"es6": true
},
"parser": "babel-eslint",
"parserOptions": {
"ecmaVersion": 6,
"sourceType": "module",
"ecmaFeatures": {
"modules": true
}
},
"plugins": [
"react",
"react-native",
],
"extends": [
"eslint:recommended",
"plugin:react/recommended",
],
"rules": {
"comma-dangle": [2, "always-multiline"],
"quotes": [2, "single", { "allowTemplateLiterals": true }],
"react/prop-types": 0,
"react/jsx-no-bind": 0,
"react/display-name": 0,
"new-cap": 0,
"react-native/no-unused-styles": 2,
"react-native/no-inline-styles": 1,
"react-native/no-color-literals": 0,
"no-class-assign": 1,
"no-console": 1,
"object-curly-spacing": [1, "always"],
"no-unused-vars": ["error", { "ignoreRestSiblings": true }]
},
"globals": {
"__DEV__": true,
"device": true,
"element": true,
"by": true,
}
}
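
A config like this is picked up automatically when ESLint runs from the project root. As a hedged sketch (assuming the parser and plugins named above are installed as dev dependencies, which this commit does not show), the setup would be roughly:

```sh
# hypothetical commands; package names inferred from the "parser" and "plugins" fields above
npm install --save-dev eslint babel-eslint eslint-plugin-react eslint-plugin-react-native
npx eslint . --ext .js
```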

2
.gitattributes vendored Normal file

@ -0,0 +1,2 @@
# Disable git large files for now!
# RNCameraExample/ios/Frameworks/FaceDetector/Frameworks/frameworks/FaceDetector.framework/FaceDetector filter=lfs diff=lfs merge=lfs -text


@ -1 +1,27 @@
<!-- Love react-native-camera? Please consider supporting our collective: 👉 https://opencollective.com/react-native-camera/donate -->
### Warning
RCTCamera is **DEPRECATED** as of v1.0.0; follow our migration guide here: https://github.com/react-native-community/react-native-camera/blob/master/docs/migration.md
### Which implementation are you using
*RNCamera* or RCTCamera (RCTCamera will be removed in v2.0.0)?
### Steps to reproduce
1.
2.
3.
### Expected behaviour
Tell us what should happen
### Actual behaviour
Tell us what happens instead
### Environment
- **Node.js version**:
- **React Native version**:
- **React Native platform + platform version**: iOS 9.0, Android 5.0, etc
### react-native-camera
**Version**: npm version or "master"
> Love react-native-camera? Please consider supporting our collective: 👉 https://opencollective.com/react-native-camera/donate


@ -1,2 +1,3 @@
/Example
issue_template.md
/RNCameraExample

10
CHANGELOG.md Normal file

@ -0,0 +1,10 @@
### master
### 1.0.0
- RNCamera as the main camera implementation for both iOS and Android (based on the Expo module)
- FaceDetector feature for both iOS and Android (based on the Expo module)
- RCTCamera deprecated
### 0.13.0
- added RNCamera implementation for Android
- added FaceDetector for Android


@ -1,3 +1,3 @@
{
"presets": ["react-native"]
}
"presets": ["react-native"]
}


@ -12,19 +12,20 @@
; For RN Apps installed via npm, "Libraries" folder is inside
; "node_modules/react-native" but in the source repo it is in the root
.*/Libraries/react-native/React.js
.*/Libraries/react-native/ReactNative.js
; Ignore polyfills
.*/Libraries/polyfills/.*
[include]
[libs]
node_modules/react-native/Libraries/react-native/react-native-interface.js
node_modules/react-native/flow
flow/
node_modules/react-native/flow/
[options]
module.system=haste
emoji=true
experimental.strict_type_args=true
module.system=haste
munge_underscores=true
@ -32,13 +33,16 @@ module.name_mapper='^[./a-zA-Z0-9$_-]+\.\(bmp\|gif\|jpg\|jpeg\|png\|psd\|svg\|we
suppress_type=$FlowIssue
suppress_type=$FlowFixMe
suppress_type=$FlowFixMeProps
suppress_type=$FlowFixMeState
suppress_type=$FixMe
suppress_comment=\\(.\\|\n\\)*\\$FlowFixMe\\($\\|[^(]\\|(\\(>=0\\.\\(3[0-6]\\|[1-2][0-9]\\|[0-9]\\).[0-9]\\)? *\\(site=[a-z,_]*react_native[a-z,_]*\\)?)\\)
suppress_comment=\\(.\\|\n\\)*\\$FlowIssue\\((\\(>=0\\.\\(3[0-6]\\|1[0-9]\\|[1-2][0-9]\\).[0-9]\\)? *\\(site=[a-z,_]*react_native[a-z,_]*\\)?)\\)?:? #[0-9]+
suppress_comment=\\(.\\|\n\\)*\\$FlowFixMe\\($\\|[^(]\\|(\\(>=0\\.\\(5[0-7]\\|[1-4][0-9]\\|[0-9]\\).[0-9]\\)? *\\(site=[a-z,_]*react_native[a-z,_]*\\)?)\\)
suppress_comment=\\(.\\|\n\\)*\\$FlowIssue\\((\\(>=0\\.\\(5[0-7]\\|[1-4][0-9]\\|[0-9]\\).[0-9]\\)? *\\(site=[a-z,_]*react_native[a-z,_]*\\)?)\\)?:? #[0-9]+
suppress_comment=\\(.\\|\n\\)*\\$FlowFixedInNextDeploy
suppress_comment=\\(.\\|\n\\)*\\$FlowExpectedError
unsafe.enable_getters_and_setters=true
[version]
^0.36.0
^0.57.0

10
Example/.gitignore vendored

@ -34,11 +34,11 @@ local.properties
#
node_modules/
npm-debug.log
yarn-error.log
# BUCK
buck-out/
\.buckd/
android/app/libs
*.keystore
# fastlane
@ -46,8 +46,8 @@ android/app/libs
# It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the
# screenshots whenever they are needed.
# For more information about the recommended setup visit:
# https://github.com/fastlane/fastlane/blob/master/fastlane/docs/Gitignore.md
# https://docs.fastlane.tools/best-practices/source-control/
fastlane/report.xml
fastlane/Preview.html
fastlane/screenshots
*/fastlane/report.xml
*/fastlane/Preview.html
*/fastlane/screenshots


@ -1,11 +1,5 @@
import React from 'react';
import {
Image,
StatusBar,
StyleSheet,
TouchableOpacity,
View,
} from 'react-native';
import { Image, StatusBar, StyleSheet, TouchableOpacity, View } from 'react-native';
import Camera from 'react-native-camera';
const styles = StyleSheet.create({
@ -15,7 +9,7 @@ const styles = StyleSheet.create({
preview: {
flex: 1,
justifyContent: 'flex-end',
alignItems: 'center',
alignItems: 'center'
},
overlay: {
position: 'absolute',
@ -68,37 +62,39 @@ export default class Example extends React.Component {
orientation: Camera.constants.Orientation.auto,
flashMode: Camera.constants.FlashMode.auto,
},
isRecording: false
isRecording: false,
};
}
takePicture = () => {
if (this.camera) {
this.camera.capture()
.then((data) => console.log(data))
this.camera
.capture()
.then(data => console.log(data))
.catch(err => console.error(err));
}
}
};
startRecording = () => {
if (this.camera) {
this.camera.capture({mode: Camera.constants.CaptureMode.video})
.then((data) => console.log(data))
.catch(err => console.error(err));
this.camera
.capture({ mode: Camera.constants.CaptureMode.video })
.then(data => console.log(data))
.catch(err => console.error(err));
this.setState({
isRecording: true
isRecording: true,
});
}
}
};
stopRecording = () => {
if (this.camera) {
this.camera.stopCapture();
this.setState({
isRecording: false
isRecording: false,
});
}
}
};
switchType = () => {
let newType;
@ -116,7 +112,7 @@ export default class Example extends React.Component {
type: newType,
},
});
}
};
get typeIcon() {
let icon;
@ -149,7 +145,7 @@ export default class Example extends React.Component {
flashMode: newFlashMode,
},
});
}
};
get flashIcon() {
let icon;
@ -169,12 +165,9 @@ export default class Example extends React.Component {
render() {
return (
<View style={styles.container}>
<StatusBar
animated
hidden
/>
<StatusBar animated hidden />
<Camera
ref={(cam) => {
ref={cam => {
this.camera = cam;
}}
style={styles.preview}
@ -186,62 +179,35 @@ export default class Example extends React.Component {
onZoomChanged={() => {}}
defaultTouchToFocus
mirrorImage={false}
cropToPreview={false}
permissionDialogTitle="Sample title"
permissionDialogMessage="Sample dialog message"
/>
<View style={[styles.overlay, styles.topOverlay]}>
<TouchableOpacity
style={styles.typeButton}
onPress={this.switchType}
>
<Image
source={this.typeIcon}
/>
<TouchableOpacity style={styles.typeButton} onPress={this.switchType}>
<Image source={this.typeIcon} />
</TouchableOpacity>
<TouchableOpacity
style={styles.flashButton}
onPress={this.switchFlash}
>
<Image
source={this.flashIcon}
/>
<TouchableOpacity style={styles.flashButton} onPress={this.switchFlash}>
<Image source={this.flashIcon} />
</TouchableOpacity>
</View>
<View style={[styles.overlay, styles.bottomOverlay]}>
{
!this.state.isRecording
&&
<TouchableOpacity
style={styles.captureButton}
onPress={this.takePicture}
>
<Image
source={require('./assets/ic_photo_camera_36pt.png')}
/>
{(!this.state.isRecording && (
<TouchableOpacity style={styles.captureButton} onPress={this.takePicture}>
<Image source={require('./assets/ic_photo_camera_36pt.png')} />
</TouchableOpacity>
||
null
}
)) ||
null}
<View style={styles.buttonsSpace} />
{
!this.state.isRecording
&&
<TouchableOpacity
style={styles.captureButton}
onPress={this.startRecording}
>
<Image
source={require('./assets/ic_videocam_36pt.png')}
/>
</TouchableOpacity>
||
<TouchableOpacity
style={styles.captureButton}
onPress={this.stopRecording}
>
<Image
source={require('./assets/ic_stop_36pt.png')}
/>
</TouchableOpacity>
}
{(!this.state.isRecording && (
<TouchableOpacity style={styles.captureButton} onPress={this.startRecording}>
<Image source={require('./assets/ic_videocam_36pt.png')} />
</TouchableOpacity>
)) || (
<TouchableOpacity style={styles.captureButton} onPress={this.stopRecording}>
<Image source={require('./assets/ic_stop_36pt.png')} />
</TouchableOpacity>
)}
</View>
</View>
);


@ -1,11 +0,0 @@
#### `Run example`
From the project root, run through the CLI:
- `cd Example/`
- `npm install`
For Android:
- `react-native run-android`
For iOS:
- `react-native run-ios`

12
Example/__tests__/App.js Normal file

@ -0,0 +1,12 @@
import 'react-native';
import React from 'react';
import App from '../App';
// Note: test renderer must be required after react-native.
import renderer from 'react-test-renderer';
it('renders correctly', () => {
const tree = renderer.create(
<App />
);
});
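
The test above only checks that `<App />` renders without throwing; the `tree` value is otherwise unused. A common extension, not part of this commit, is a snapshot assertion; a minimal sketch:

```js
it('matches the stored snapshot', () => {
  // Jest writes the first render to __snapshots__/ and diffs subsequent runs against it
  const tree = renderer.create(<App />).toJSON();
  expect(tree).toMatchSnapshot();
});
```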


@ -1,5 +1,3 @@
import re
# To learn about Buck see [Docs](https://buckbuild.com/).
# To run your application with Buck:
# - install Buck
@ -11,8 +9,9 @@ import re
#
lib_deps = []
for jarfile in glob(['libs/*.jar']):
name = 'jars__' + re.sub(r'^.*/([^/]+)\.jar$', r'\1', jarfile)
name = 'jars__' + jarfile[jarfile.rindex('/') + 1: jarfile.rindex('.jar')]
lib_deps.append(':' + name)
prebuilt_jar(
name = name,
@ -20,7 +19,7 @@ for jarfile in glob(['libs/*.jar']):
)
for aarfile in glob(['libs/*.aar']):
name = 'aars__' + re.sub(r'^.*/([^/]+)\.aar$', r'\1', aarfile)
name = 'aars__' + aarfile[aarfile.rindex('/') + 1: aarfile.rindex('.aar')]
lib_deps.append(':' + name)
android_prebuilt_aar(
name = name,
@ -28,39 +27,39 @@ for aarfile in glob(['libs/*.aar']):
)
android_library(
name = 'all-libs',
exported_deps = lib_deps
name = "all-libs",
exported_deps = lib_deps,
)
android_library(
name = 'app-code',
srcs = glob([
'src/main/java/**/*.java',
]),
deps = [
':all-libs',
':build_config',
':res',
],
name = "app-code",
srcs = glob([
"src/main/java/**/*.java",
]),
deps = [
":all-libs",
":build_config",
":res",
],
)
android_build_config(
name = 'build_config',
package = 'com.example',
name = "build_config",
package = "com.example",
)
android_resource(
name = 'res',
res = 'src/main/res',
package = 'com.example',
name = "res",
package = "com.example",
res = "src/main/res",
)
android_binary(
name = 'app',
package_type = 'debug',
manifest = 'src/main/AndroidManifest.xml',
keystore = '//android/keystores:debug',
deps = [
':app-code',
],
name = "app",
keystore = "//android/keystores:debug",
manifest = "src/main/AndroidManifest.xml",
package_type = "debug",
deps = [
":app-code",
],
)
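
The BUCK change above swaps `re.sub` for plain string slicing when deriving rule names from jar/aar paths. A quick standalone check in plain Python (the path is hypothetical) shows the two expressions agree:

```python
import re

jarfile = 'libs/support-v4.jar'  # hypothetical path matching the glob above
by_regex = re.sub(r'^.*/([^/]+)\.jar$', r'\1', jarfile)
by_slice = jarfile[jarfile.rindex('/') + 1: jarfile.rindex('.jar')]
assert by_regex == by_slice == 'support-v4'
```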


@ -33,6 +33,13 @@ import com.android.build.OutputFile
* // bundleInPaidRelease: true,
* // bundleInBeta: true,
*
* // whether to disable dev mode in custom build variants (by default only disabled in release)
* // for example: to disable dev mode in the staging build type (if configured)
* devDisabledInStaging: true,
* // The configuration property can be in the following formats
* // 'devDisabledIn${productFlavor}${buildType}'
* // 'devDisabledIn${buildType}'
*
* // the root of your project, i.e. where "package.json" lives
* root: "../../",
*
@ -58,13 +65,17 @@ import com.android.build.OutputFile
* inputExcludes: ["android/**", "ios/**"],
*
* // override which node gets called and with what additional arguments
* nodeExecutableAndArgs: ["node"]
* nodeExecutableAndArgs: ["node"],
*
* // supply additional arguments to the packager
* extraPackagerArgs: []
* ]
*/
project.ext.react = [
entryFile: "index.js"
]
apply from: "../../node_modules/react-native/react.gradle"
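
For context, the options documented in the comment block above all live in this same `project.ext.react` map. A hedged sketch combining a few of them (the `staging` build type is an assumption, not something configured in this commit):

```gradle
project.ext.react = [
    entryFile: "index.js",
    // assumes a 'staging' build type exists; see the devDisabledIn... comment above
    devDisabledInStaging: true,
    // override which node binary react.gradle invokes
    nodeExecutableAndArgs: ["node"],
    extraPackagerArgs: []
]
```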
/**
@ -83,15 +94,15 @@ def enableSeparateBuildPerCPUArchitecture = false
def enableProguardInReleaseBuilds = false
android {
compileSdkVersion 23
buildToolsVersion "23.0.1"
compileSdkVersion rootProject.ext.compileSdkVersion
buildToolsVersion rootProject.ext.buildToolsVersion
defaultConfig {
applicationId "com.example"
minSdkVersion 16
targetSdkVersion 22
targetSdkVersion 26
versionCode 1
versionName "1.0"
versionName "1.0.0"
ndk {
abiFilters "armeabi-v7a", "x86"
}


@ -50,6 +50,10 @@
-dontwarn com.facebook.react.**
# TextLayoutBuilder uses a non-public Android constructor within StaticLayout.
# See libs/proxy/src/main/java/com/facebook/fbui/textlayoutbuilder/proxy for details.
-dontwarn android.text.StaticLayout
# okhttp
-keepattributes Signature


@ -5,18 +5,12 @@
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW"/>
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.RECORD_VIDEO"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-feature android:name="android.hardware.camera" android:required="false" />
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false" />
<uses-sdk
android:minSdkVersion="16"
android:targetSdkVersion="22" />
@ -30,7 +24,8 @@
<activity
android:name=".MainActivity"
android:label="@string/app_name"
android:configChanges="keyboard|keyboardHidden|orientation|screenSize">
android:configChanges="keyboard|keyboardHidden|orientation|screenSize"
android:windowSoftInputMode="adjustResize">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />


@ -1,17 +1,14 @@
package com.example;
import android.app.Application;
import android.util.Log;
import com.facebook.react.ReactApplication;
import com.facebook.react.ReactInstanceManager;
import com.lwansbrough.RCTCamera.RCTCameraPackage;
import com.facebook.react.ReactNativeHost;
import com.facebook.react.ReactPackage;
import com.facebook.react.shell.MainReactPackage;
import com.facebook.soloader.SoLoader;
import com.lwansbrough.RCTCamera.RCTCameraPackage;
import java.util.Arrays;
import java.util.List;
@ -19,7 +16,7 @@ public class MainApplication extends Application implements ReactApplication {
private final ReactNativeHost mReactNativeHost = new ReactNativeHost(this) {
@Override
protected boolean getUseDeveloperSupport() {
public boolean getUseDeveloperSupport() {
return BuildConfig.DEBUG;
}
@ -27,9 +24,14 @@ public class MainApplication extends Application implements ReactApplication {
protected List<ReactPackage> getPackages() {
return Arrays.<ReactPackage>asList(
new MainReactPackage(),
new RCTCameraPackage()
new RCTCameraPackage()
);
}
@Override
protected String getJSMainModuleName() {
return "index";
}
};
@Override


@ -3,9 +3,10 @@
buildscript {
repositories {
jcenter()
google()
}
dependencies {
classpath 'com.android.tools.build:gradle:1.3.1'
classpath 'com.android.tools.build:gradle:3.0.1'
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
@ -20,5 +21,23 @@ allprojects {
// All of React Native (JS, Obj-C sources, Android binaries) is installed from npm
url "$rootDir/../node_modules/react-native/android"
}
google()
}
}
ext {
compileSdkVersion = 26
buildToolsVersion = '26.0.2'
}
subprojects { subproject ->
afterEvaluate{
if((subproject.plugins.hasPlugin('android') || subproject.plugins.hasPlugin('android-library'))) {
android {
compileSdkVersion rootProject.ext.compileSdkVersion
buildToolsVersion rootProject.ext.buildToolsVersion
}
}
}
}


@ -2,4 +2,4 @@ distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-2.4-all.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-4.1-all.zip
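
A wrapper bump like this is usually generated rather than hand-edited; with a working wrapper already in place, the equivalent command would presumably be:

```sh
# --distribution-type all matches the -all.zip URL above (flag available on newer Gradle versions)
./gradlew wrapper --gradle-version 4.1 --distribution-type all
```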


@ -1,8 +1,8 @@
keystore(
name = 'debug',
store = 'debug.keystore',
properties = 'debug.keystore.properties',
visibility = [
'PUBLIC',
],
name = "debug",
properties = "debug.keystore.properties",
store = "debug.keystore",
visibility = [
"PUBLIC",
],
)


@ -3,5 +3,3 @@ include ':react-native-camera'
project(':react-native-camera').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-camera/android')
include ':app'
include ':react-native-camera'
project(':react-native-camera').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-camera/android')

4
Example/app.json Normal file

@ -0,0 +1,4 @@
{
"name": "Example",
"displayName": "Example"
}


@ -1,6 +0,0 @@
import React from 'react';
import { AppRegistry } from 'react-native';
import Example from './Example';
AppRegistry.registerComponent('Example', () => Example);


@ -1,6 +1,4 @@
import React from 'react';
import { AppRegistry } from 'react-native';
import Example from './Example';
AppRegistry.registerComponent('Example', () => Example);


@ -0,0 +1,54 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>org.reactjs.native.example.$(PRODUCT_NAME:rfc1034identifier)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>armv7</string>
</array>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UIViewControllerBasedStatusBarAppearance</key>
<false/>
<key>NSLocationWhenInUseUsageDescription</key>
<string></string>
<key>NSAppTransportSecurity</key>
<!--See http://ste.vn/2015/06/10/configuring-app-transport-security-ios-9-osx-10-11/ -->
<dict>
<key>NSExceptionDomains</key>
<dict>
<key>localhost</key>
<dict>
<key>NSExceptionAllowsInsecureHTTPLoads</key>
<true/>
</dict>
</dict>
</dict>
</dict>
</plist>


@ -0,0 +1,24 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>org.reactjs.native.example.$(PRODUCT_NAME:rfc1034identifier)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>BNDL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1</string>
</dict>
</plist>


@ -22,9 +22,22 @@
13B07FC11A68108700A75B9A /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB71A68108700A75B9A /* main.m */; };
140ED2AC1D01E1AD002B40FF /* libReact.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 146834041AC3E56700842450 /* libReact.a */; };
146834051AC3E58100842450 /* libReact.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 146834041AC3E56700842450 /* libReact.a */; };
19BFA888017F428490C2439E /* libRCTCamera.a in Frameworks */ = {isa = PBXBuildFile; fileRef = E3AE52E5006149BA91BB9B89 /* libRCTCamera.a */; };
2D02E4BC1E0B4A80006451C7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB01A68108700A75B9A /* AppDelegate.m */; };
2D02E4BD1E0B4A84006451C7 /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 13B07FB51A68108700A75B9A /* Images.xcassets */; };
2D02E4BF1E0B4AB3006451C7 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB71A68108700A75B9A /* main.m */; };
2D02E4C21E0B4AEC006451C7 /* libRCTAnimation.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 5E9157351DD0AC6500FF2AA8 /* libRCTAnimation.a */; };
2D02E4C31E0B4AEC006451C7 /* libRCTImage-tvOS.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 3DAD3E841DF850E9000B6D8A /* libRCTImage-tvOS.a */; };
2D02E4C41E0B4AEC006451C7 /* libRCTLinking-tvOS.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 3DAD3E881DF850E9000B6D8A /* libRCTLinking-tvOS.a */; };
2D02E4C51E0B4AEC006451C7 /* libRCTNetwork-tvOS.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 3DAD3E8C1DF850E9000B6D8A /* libRCTNetwork-tvOS.a */; };
2D02E4C61E0B4AEC006451C7 /* libRCTSettings-tvOS.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 3DAD3E901DF850E9000B6D8A /* libRCTSettings-tvOS.a */; };
2D02E4C71E0B4AEC006451C7 /* libRCTText-tvOS.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 3DAD3E941DF850E9000B6D8A /* libRCTText-tvOS.a */; };
2D02E4C81E0B4AEC006451C7 /* libRCTWebSocket-tvOS.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 3DAD3E991DF850E9000B6D8A /* libRCTWebSocket-tvOS.a */; };
2D02E4C91E0B4AEC006451C7 /* libReact-tvOS.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 3DAD3EA31DF850E9000B6D8A /* libReact-tvOS.a */; };
2DCD954D1E0B4F2C00145EB5 /* ExampleTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 00E356F21AD99517003FC87E /* ExampleTests.m */; };
5E9157361DD0AC6A00FF2AA8 /* libRCTAnimation.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 5E9157331DD0AC6500FF2AA8 /* libRCTAnimation.a */; };
832341BD1AAA6AB300B99B32 /* libRCTText.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 832341B51AAA6A8300B99B32 /* libRCTText.a */; };
ADBDB9381DFEBF1600ED6528 /* libRCTBlob.a in Frameworks */ = {isa = PBXBuildFile; fileRef = ADBDB9271DFEBF0700ED6528 /* libRCTBlob.a */; };
C171C414BCA74BDEB03C55AD /* libRCTCamera.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 2420DD75E84D4100A808765F /* libRCTCamera.a */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
@ -91,6 +104,41 @@
remoteGlobalIDString = 83CBBA2E1A601D0E00E9B192;
remoteInfo = React;
};
208501451FF9332D00C1AD1F /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = ADBDB91F1DFEBF0600ED6528 /* RCTBlob.xcodeproj */;
proxyType = 2;
remoteGlobalIDString = ADD01A681E09402E00F6D226;
remoteInfo = "RCTBlob-tvOS";
};
208501571FF9332D00C1AD1F /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = 139FDEE61B06529A00C62182 /* RCTWebSocket.xcodeproj */;
proxyType = 2;
remoteGlobalIDString = 3DBE0D001F3B181A0099AA32;
remoteInfo = fishhook;
};
208501591FF9332D00C1AD1F /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = 139FDEE61B06529A00C62182 /* RCTWebSocket.xcodeproj */;
proxyType = 2;
remoteGlobalIDString = 3DBE0D0D1F3B181C0099AA32;
remoteInfo = "fishhook-tvOS";
};
2085015E1FF9332F00C1AD1F /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = 055DCF2769164ECEA190178E /* RCTCamera.xcodeproj */;
proxyType = 2;
remoteGlobalIDString = 4107012F1ACB723B00C6AA39;
remoteInfo = RCTCamera;
};
2D02E4911E0B4A5D006451C7 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = 83CBB9F71A601CBA00E9B192 /* Project object */;
proxyType = 1;
remoteGlobalIDString = 2D02E47A1E0B4A5D006451C7;
remoteInfo = "Example-tvOS";
};
3DAD3E831DF850E9000B6D8A /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = 00C302BB1ABCB91800DB3ED1 /* RCTImage.xcodeproj */;
@ -210,12 +258,12 @@
remoteGlobalIDString = 58B5119B1A9E6C1200147676;
remoteInfo = RCTText;
};
BDD99F971D95C105007818E8 /* PBXContainerItemProxy */ = {
ADBDB9261DFEBF0700ED6528 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = 0B8DD84DC6774E94A78B341F /* RCTCamera.xcodeproj */;
containerPortal = ADBDB91F1DFEBF0600ED6528 /* RCTBlob.xcodeproj */;
proxyType = 2;
remoteGlobalIDString = 4107012F1ACB723B00C6AA39;
remoteInfo = RCTCamera;
remoteGlobalIDString = 358F4ED71D1E81A9004DF814;
remoteInfo = RCTBlob;
};
/* End PBXContainerItemProxy section */
@ -229,7 +277,7 @@
00E356EE1AD99517003FC87E /* ExampleTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = ExampleTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
00E356F11AD99517003FC87E /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
00E356F21AD99517003FC87E /* ExampleTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ExampleTests.m; sourceTree = "<group>"; };
0B8DD84DC6774E94A78B341F /* RCTCamera.xcodeproj */ = {isa = PBXFileReference; explicitFileType = undefined; fileEncoding = 9; includeInIndex = 0; lastKnownFileType = "wrapper.pb-project"; name = RCTCamera.xcodeproj; path = "../node_modules/react-native-camera/ios/RCTCamera.xcodeproj"; sourceTree = "<group>"; };
055DCF2769164ECEA190178E /* RCTCamera.xcodeproj */ = {isa = PBXFileReference; explicitFileType = undefined; fileEncoding = 9; includeInIndex = 0; lastKnownFileType = "wrapper.pb-project"; name = RCTCamera.xcodeproj; path = "../node_modules/react-native-camera/ios/RCTCamera.xcodeproj"; sourceTree = "<group>"; };
139105B61AF99BAD00B5F7CC /* RCTSettings.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = RCTSettings.xcodeproj; path = "../node_modules/react-native/Libraries/Settings/RCTSettings.xcodeproj"; sourceTree = "<group>"; };
139FDEE61B06529A00C62182 /* RCTWebSocket.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = RCTWebSocket.xcodeproj; path = "../node_modules/react-native/Libraries/WebSocket/RCTWebSocket.xcodeproj"; sourceTree = "<group>"; };
13B07F961A680F5B00A75B9A /* Example.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Example.app; sourceTree = BUILT_PRODUCTS_DIR; };
@ -240,10 +288,13 @@
13B07FB61A68108700A75B9A /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = Info.plist; path = Example/Info.plist; sourceTree = "<group>"; };
13B07FB71A68108700A75B9A /* main.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = main.m; path = Example/main.m; sourceTree = "<group>"; };
146833FF1AC3E56700842450 /* React.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = React.xcodeproj; path = "../node_modules/react-native/React/React.xcodeproj"; sourceTree = "<group>"; };
2420DD75E84D4100A808765F /* libRCTCamera.a */ = {isa = PBXFileReference; explicitFileType = undefined; fileEncoding = 9; includeInIndex = 0; lastKnownFileType = archive.ar; path = libRCTCamera.a; sourceTree = "<group>"; };
2D02E47B1E0B4A5D006451C7 /* Example-tvOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Example-tvOS.app"; sourceTree = BUILT_PRODUCTS_DIR; };
2D02E4901E0B4A5D006451C7 /* Example-tvOSTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "Example-tvOSTests.xctest"; sourceTree = BUILT_PRODUCTS_DIR; };
5E91572D1DD0AC6500FF2AA8 /* RCTAnimation.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = RCTAnimation.xcodeproj; path = "../node_modules/react-native/Libraries/NativeAnimation/RCTAnimation.xcodeproj"; sourceTree = "<group>"; };
78C398B01ACF4ADC00677621 /* RCTLinking.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = RCTLinking.xcodeproj; path = "../node_modules/react-native/Libraries/LinkingIOS/RCTLinking.xcodeproj"; sourceTree = "<group>"; };
832341B01AAA6A8300B99B32 /* RCTText.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = RCTText.xcodeproj; path = "../node_modules/react-native/Libraries/Text/RCTText.xcodeproj"; sourceTree = "<group>"; };
E3AE52E5006149BA91BB9B89 /* libRCTCamera.a */ = {isa = PBXFileReference; explicitFileType = undefined; fileEncoding = 9; includeInIndex = 0; lastKnownFileType = archive.ar; path = libRCTCamera.a; sourceTree = "<group>"; };
ADBDB91F1DFEBF0600ED6528 /* RCTBlob.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = RCTBlob.xcodeproj; path = "../node_modules/react-native/Libraries/Blob/RCTBlob.xcodeproj"; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@ -259,8 +310,10 @@
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
ADBDB9381DFEBF1600ED6528 /* libRCTBlob.a in Frameworks */,
5E9157361DD0AC6A00FF2AA8 /* libRCTAnimation.a in Frameworks */,
146834051AC3E58100842450 /* libReact.a in Frameworks */,
5E9157361DD0AC6A00FF2AA8 /* libRCTAnimation.a in Frameworks */,
00C302E51ABCBA2D00DB3ED1 /* libRCTActionSheet.a in Frameworks */,
00C302E71ABCBA2D00DB3ED1 /* libRCTGeolocation.a in Frameworks */,
00C302E81ABCBA2D00DB3ED1 /* libRCTImage.a in Frameworks */,
@ -270,7 +323,29 @@
832341BD1AAA6AB300B99B32 /* libRCTText.a in Frameworks */,
00C302EA1ABCBA2D00DB3ED1 /* libRCTVibration.a in Frameworks */,
139FDEF61B0652A700C62182 /* libRCTWebSocket.a in Frameworks */,
19BFA888017F428490C2439E /* libRCTCamera.a in Frameworks */,
C171C414BCA74BDEB03C55AD /* libRCTCamera.a in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
2D02E4781E0B4A5D006451C7 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
2D02E4C91E0B4AEC006451C7 /* libReact-tvOS.a in Frameworks */,
2D02E4C21E0B4AEC006451C7 /* libRCTAnimation.a in Frameworks */,
2D02E4C31E0B4AEC006451C7 /* libRCTImage-tvOS.a in Frameworks */,
2D02E4C41E0B4AEC006451C7 /* libRCTLinking-tvOS.a in Frameworks */,
2D02E4C51E0B4AEC006451C7 /* libRCTNetwork-tvOS.a in Frameworks */,
2D02E4C61E0B4AEC006451C7 /* libRCTSettings-tvOS.a in Frameworks */,
2D02E4C71E0B4AEC006451C7 /* libRCTText-tvOS.a in Frameworks */,
2D02E4C81E0B4AEC006451C7 /* libRCTWebSocket-tvOS.a in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
2D02E48D1E0B4A5D006451C7 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
@ -350,6 +425,8 @@
children = (
139FDEF41B06529B00C62182 /* libRCTWebSocket.a */,
3DAD3E991DF850E9000B6D8A /* libRCTWebSocket-tvOS.a */,
208501581FF9332D00C1AD1F /* libfishhook.a */,
2085015A1FF9332D00C1AD1F /* libfishhook-tvOS.a */,
);
name = Products;
sourceTree = "<group>";
@ -372,13 +449,29 @@
isa = PBXGroup;
children = (
146834041AC3E56700842450 /* libReact.a */,
3DAD3EA31DF850E9000B6D8A /* libReact.a */,
3DAD3EA51DF850E9000B6D8A /* libyoga.a */,
3DAD3EA71DF850E9000B6D8A /* libyoga.a */,
3DAD3EA91DF850E9000B6D8A /* libcxxreact.a */,
3DAD3EAB1DF850E9000B6D8A /* libcxxreact.a */,
3DAD3EAD1DF850E9000B6D8A /* libjschelpers.a */,
3DAD3EAF1DF850E9000B6D8A /* libjschelpers.a */,
3DAD3EA31DF850E9000B6D8A /* libReact-tvOS.a */,
);
name = Products;
sourceTree = "<group>";
};
2085013F1FF9332600C1AD1F /* Recovered References */ = {
isa = PBXGroup;
children = (
2420DD75E84D4100A808765F /* libRCTCamera.a */,
);
name = "Recovered References";
sourceTree = "<group>";
};
2085015B1FF9332D00C1AD1F /* Products */ = {
isa = PBXGroup;
children = (
2085015F1FF9332F00C1AD1F /* libRCTCamera.a */,
);
name = Products;
sourceTree = "<group>";
@ -387,7 +480,7 @@
isa = PBXGroup;
children = (
5E9157331DD0AC6500FF2AA8 /* libRCTAnimation.a */,
5E9157351DD0AC6500FF2AA8 /* libRCTAnimation-tvOS.a */,
5E9157351DD0AC6500FF2AA8 /* libRCTAnimation.a */,
);
name = Products;
sourceTree = "<group>";
@ -407,6 +500,7 @@
5E91572D1DD0AC6500FF2AA8 /* RCTAnimation.xcodeproj */,
146833FF1AC3E56700842450 /* React.xcodeproj */,
00C302A71ABCB8CE00DB3ED1 /* RCTActionSheet.xcodeproj */,
ADBDB91F1DFEBF0600ED6528 /* RCTBlob.xcodeproj */,
00C302B51ABCB90400DB3ED1 /* RCTGeolocation.xcodeproj */,
00C302BB1ABCB91800DB3ED1 /* RCTImage.xcodeproj */,
78C398B01ACF4ADC00677621 /* RCTLinking.xcodeproj */,
@ -415,7 +509,7 @@
832341B01AAA6A8300B99B32 /* RCTText.xcodeproj */,
00C302DF1ABCB9EE00DB3ED1 /* RCTVibration.xcodeproj */,
139FDEE61B06529A00C62182 /* RCTWebSocket.xcodeproj */,
0B8DD84DC6774E94A78B341F /* RCTCamera.xcodeproj */,
055DCF2769164ECEA190178E /* RCTCamera.xcodeproj */,
);
name = Libraries;
sourceTree = "<group>";
@ -436,24 +530,29 @@
832341AE1AAA6A7D00B99B32 /* Libraries */,
00E356EF1AD99517003FC87E /* ExampleTests */,
83CBBA001A601CBA00E9B192 /* Products */,
2085013F1FF9332600C1AD1F /* Recovered References */,
);
indentWidth = 2;
sourceTree = "<group>";
tabWidth = 2;
usesTabs = 0;
};
83CBBA001A601CBA00E9B192 /* Products */ = {
isa = PBXGroup;
children = (
13B07F961A680F5B00A75B9A /* Example.app */,
00E356EE1AD99517003FC87E /* ExampleTests.xctest */,
2D02E47B1E0B4A5D006451C7 /* Example-tvOS.app */,
2D02E4901E0B4A5D006451C7 /* Example-tvOSTests.xctest */,
);
name = Products;
sourceTree = "<group>";
};
BDD99F931D95C105007818E8 /* Products */ = {
ADBDB9201DFEBF0600ED6528 /* Products */ = {
isa = PBXGroup;
children = (
BDD99F981D95C105007818E8 /* libRCTCamera.a */,
ADBDB9271DFEBF0700ED6528 /* libRCTBlob.a */,
208501461FF9332D00C1AD1F /* libRCTBlob-tvOS.a */,
);
name = Products;
sourceTree = "<group>";
@ -497,19 +596,64 @@
productReference = 13B07F961A680F5B00A75B9A /* Example.app */;
productType = "com.apple.product-type.application";
};
2D02E47A1E0B4A5D006451C7 /* Example-tvOS */ = {
isa = PBXNativeTarget;
buildConfigurationList = 2D02E4BA1E0B4A5E006451C7 /* Build configuration list for PBXNativeTarget "Example-tvOS" */;
buildPhases = (
2D02E4771E0B4A5D006451C7 /* Sources */,
2D02E4781E0B4A5D006451C7 /* Frameworks */,
2D02E4791E0B4A5D006451C7 /* Resources */,
2D02E4CB1E0B4B27006451C7 /* Bundle React Native Code And Images */,
);
buildRules = (
);
dependencies = (
);
name = "Example-tvOS";
productName = "Example-tvOS";
productReference = 2D02E47B1E0B4A5D006451C7 /* Example-tvOS.app */;
productType = "com.apple.product-type.application";
};
2D02E48F1E0B4A5D006451C7 /* Example-tvOSTests */ = {
isa = PBXNativeTarget;
buildConfigurationList = 2D02E4BB1E0B4A5E006451C7 /* Build configuration list for PBXNativeTarget "Example-tvOSTests" */;
buildPhases = (
2D02E48C1E0B4A5D006451C7 /* Sources */,
2D02E48D1E0B4A5D006451C7 /* Frameworks */,
2D02E48E1E0B4A5D006451C7 /* Resources */,
);
buildRules = (
);
dependencies = (
2D02E4921E0B4A5D006451C7 /* PBXTargetDependency */,
);
name = "Example-tvOSTests";
productName = "Example-tvOSTests";
productReference = 2D02E4901E0B4A5D006451C7 /* Example-tvOSTests.xctest */;
productType = "com.apple.product-type.bundle.unit-test";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
83CBB9F71A601CBA00E9B192 /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 0610;
LastUpgradeCheck = 610;
ORGANIZATIONNAME = Facebook;
TargetAttributes = {
00E356ED1AD99517003FC87E = {
CreatedOnToolsVersion = 6.2;
TestTargetID = 13B07F861A680F5B00A75B9A;
};
2D02E47A1E0B4A5D006451C7 = {
CreatedOnToolsVersion = 8.2.1;
ProvisioningStyle = Automatic;
};
2D02E48F1E0B4A5D006451C7 = {
CreatedOnToolsVersion = 8.2.1;
ProvisioningStyle = Automatic;
TestTargetID = 2D02E47A1E0B4A5D006451C7;
};
};
};
buildConfigurationList = 83CBB9FA1A601CBA00E9B192 /* Build configuration list for PBXProject "Example" */;
@ -533,8 +677,12 @@
ProjectRef = 5E91572D1DD0AC6500FF2AA8 /* RCTAnimation.xcodeproj */;
},
{
ProductGroup = BDD99F931D95C105007818E8 /* Products */;
ProjectRef = 0B8DD84DC6774E94A78B341F /* RCTCamera.xcodeproj */;
ProductGroup = ADBDB9201DFEBF0600ED6528 /* Products */;
ProjectRef = ADBDB91F1DFEBF0600ED6528 /* RCTBlob.xcodeproj */;
},
{
ProductGroup = 2085015B1FF9332D00C1AD1F /* Products */;
ProjectRef = 055DCF2769164ECEA190178E /* RCTCamera.xcodeproj */;
},
{
ProductGroup = 00C302B61ABCB90400DB3ED1 /* Products */;
@ -577,6 +725,8 @@
targets = (
13B07F861A680F5B00A75B9A /* Example */,
00E356ED1AD99517003FC87E /* ExampleTests */,
2D02E47A1E0B4A5D006451C7 /* Example-tvOS */,
2D02E48F1E0B4A5D006451C7 /* Example-tvOSTests */,
);
};
/* End PBXProject section */
@ -638,6 +788,34 @@
remoteRef = 146834031AC3E56700842450 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
208501461FF9332D00C1AD1F /* libRCTBlob-tvOS.a */ = {
isa = PBXReferenceProxy;
fileType = archive.ar;
path = "libRCTBlob-tvOS.a";
remoteRef = 208501451FF9332D00C1AD1F /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
208501581FF9332D00C1AD1F /* libfishhook.a */ = {
isa = PBXReferenceProxy;
fileType = archive.ar;
path = libfishhook.a;
remoteRef = 208501571FF9332D00C1AD1F /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
2085015A1FF9332D00C1AD1F /* libfishhook-tvOS.a */ = {
isa = PBXReferenceProxy;
fileType = archive.ar;
path = "libfishhook-tvOS.a";
remoteRef = 208501591FF9332D00C1AD1F /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
2085015F1FF9332F00C1AD1F /* libRCTCamera.a */ = {
isa = PBXReferenceProxy;
fileType = archive.ar;
path = libRCTCamera.a;
remoteRef = 2085015E1FF9332F00C1AD1F /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
3DAD3E841DF850E9000B6D8A /* libRCTImage-tvOS.a */ = {
isa = PBXReferenceProxy;
fileType = archive.ar;
@ -680,10 +858,10 @@
remoteRef = 3DAD3E981DF850E9000B6D8A /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
3DAD3EA31DF850E9000B6D8A /* libReact.a */ = {
3DAD3EA31DF850E9000B6D8A /* libReact-tvOS.a */ = {
isa = PBXReferenceProxy;
fileType = archive.ar;
path = libReact.a;
path = "libReact-tvOS.a";
remoteRef = 3DAD3EA21DF850E9000B6D8A /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
@ -736,10 +914,10 @@
remoteRef = 5E9157321DD0AC6500FF2AA8 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
5E9157351DD0AC6500FF2AA8 /* libRCTAnimation-tvOS.a */ = {
5E9157351DD0AC6500FF2AA8 /* libRCTAnimation.a */ = {
isa = PBXReferenceProxy;
fileType = archive.ar;
path = "libRCTAnimation-tvOS.a";
path = libRCTAnimation.a;
remoteRef = 5E9157341DD0AC6500FF2AA8 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
@ -757,11 +935,11 @@
remoteRef = 832341B41AAA6A8300B99B32 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
BDD99F981D95C105007818E8 /* libRCTCamera.a */ = {
ADBDB9271DFEBF0700ED6528 /* libRCTBlob.a */ = {
isa = PBXReferenceProxy;
fileType = archive.ar;
path = libRCTCamera.a;
remoteRef = BDD99F971D95C105007818E8 /* PBXContainerItemProxy */;
path = libRCTBlob.a;
remoteRef = ADBDB9261DFEBF0700ED6528 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
/* End PBXReferenceProxy section */
@ -783,6 +961,21 @@
);
runOnlyForDeploymentPostprocessing = 0;
};
2D02E4791E0B4A5D006451C7 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
2D02E4BD1E0B4A84006451C7 /* Images.xcassets in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
2D02E48E1E0B4A5D006451C7 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXShellScriptBuildPhase section */
@ -798,7 +991,21 @@
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "export NODE_BINARY=node\n../node_modules/react-native/packager/react-native-xcode.sh";
shellScript = "export NODE_BINARY=node\n../node_modules/react-native/scripts/react-native-xcode.sh";
};
2D02E4CB1E0B4B27006451C7 /* Bundle React Native Code And Images */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
);
name = "Bundle React Native Code And Images";
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "export NODE_BINARY=node\n../node_modules/react-native/scripts/react-native-xcode.sh";
};
/* End PBXShellScriptBuildPhase section */
@ -820,6 +1027,23 @@
);
runOnlyForDeploymentPostprocessing = 0;
};
2D02E4771E0B4A5D006451C7 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
2D02E4BF1E0B4AB3006451C7 /* main.m in Sources */,
2D02E4BC1E0B4A80006451C7 /* AppDelegate.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
2D02E48C1E0B4A5D006451C7 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
2DCD954D1E0B4F2C00145EB5 /* ExampleTests.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXTargetDependency section */
@ -828,6 +1052,11 @@
target = 13B07F861A680F5B00A75B9A /* Example */;
targetProxy = 00E356F41AD99517003FC87E /* PBXContainerItemProxy */;
};
2D02E4921E0B4A5D006451C7 /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
target = 2D02E47A1E0B4A5D006451C7 /* Example-tvOS */;
targetProxy = 2D02E4911E0B4A5D006451C7 /* PBXContainerItemProxy */;
};
/* End PBXTargetDependency section */
/* Begin PBXVariantGroup section */
@ -847,11 +1076,14 @@
isa = XCBuildConfiguration;
buildSettings = {
BUNDLE_LOADER = "$(TEST_HOST)";
DEVELOPMENT_TEAM = "";
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
HEADER_SEARCH_PATHS = (
"$(inherited)",
"$(SRCROOT)/../node_modules/react-native-camera/ios/**",
);
INFOPLIST_FILE = ExampleTests/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
@ -859,6 +1091,10 @@
"$(inherited)",
"\"$(SRCROOT)/$(TARGET_NAME)\"",
);
OTHER_LDFLAGS = (
"-ObjC",
"-lc++",
);
PRODUCT_NAME = "$(TARGET_NAME)";
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Example.app/Example";
};
@ -869,7 +1105,10 @@
buildSettings = {
BUNDLE_LOADER = "$(TEST_HOST)";
COPY_PHASE_STRIP = NO;
DEVELOPMENT_TEAM = "";
HEADER_SEARCH_PATHS = (
"$(inherited)",
"$(SRCROOT)/../node_modules/react-native-camera/ios/**",
);
INFOPLIST_FILE = ExampleTests/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
@ -877,6 +1116,10 @@
"$(inherited)",
"\"$(SRCROOT)/$(TARGET_NAME)\"",
);
OTHER_LDFLAGS = (
"-ObjC",
"-lc++",
);
PRODUCT_NAME = "$(TARGET_NAME)";
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Example.app/Example";
};
@ -888,11 +1131,8 @@
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CURRENT_PROJECT_VERSION = 1;
DEAD_CODE_STRIPPING = NO;
DEVELOPMENT_TEAM = "";
HEADER_SEARCH_PATHS = (
"$(inherited)",
/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include,
"$(SRCROOT)/../node_modules/react-native/React/**",
"$(SRCROOT)/../node_modules/react-native-camera/ios/**",
);
INFOPLIST_FILE = Example/Info.plist;
@ -912,11 +1152,8 @@
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = "";
HEADER_SEARCH_PATHS = (
"$(inherited)",
/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include,
"$(SRCROOT)/../node_modules/react-native/React/**",
"$(SRCROOT)/../node_modules/react-native-camera/ios/**",
);
INFOPLIST_FILE = Example/Info.plist;
@ -931,6 +1168,124 @@
};
name = Release;
};
2D02E4971E0B4A5E006451C7 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = "App Icon & Top Shelf Image";
ASSETCATALOG_COMPILER_LAUNCHIMAGE_NAME = LaunchImage;
CLANG_ANALYZER_NONNULL = YES;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_TESTABILITY = YES;
GCC_NO_COMMON_BLOCKS = YES;
HEADER_SEARCH_PATHS = (
"$(inherited)",
"$(SRCROOT)/../node_modules/react-native-camera/ios/**",
);
INFOPLIST_FILE = "Example-tvOS/Info.plist";
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
"$(inherited)",
"\"$(SRCROOT)/$(TARGET_NAME)\"",
);
OTHER_LDFLAGS = (
"-ObjC",
"-lc++",
);
PRODUCT_BUNDLE_IDENTIFIER = "com.facebook.REACT.Example-tvOS";
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = appletvos;
TARGETED_DEVICE_FAMILY = 3;
TVOS_DEPLOYMENT_TARGET = 9.2;
};
name = Debug;
};
2D02E4981E0B4A5E006451C7 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = "App Icon & Top Shelf Image";
ASSETCATALOG_COMPILER_LAUNCHIMAGE_NAME = LaunchImage;
CLANG_ANALYZER_NONNULL = YES;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
GCC_NO_COMMON_BLOCKS = YES;
HEADER_SEARCH_PATHS = (
"$(inherited)",
"$(SRCROOT)/../node_modules/react-native-camera/ios/**",
);
INFOPLIST_FILE = "Example-tvOS/Info.plist";
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
"$(inherited)",
"\"$(SRCROOT)/$(TARGET_NAME)\"",
);
OTHER_LDFLAGS = (
"-ObjC",
"-lc++",
);
PRODUCT_BUNDLE_IDENTIFIER = "com.facebook.REACT.Example-tvOS";
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = appletvos;
TARGETED_DEVICE_FAMILY = 3;
TVOS_DEPLOYMENT_TARGET = 9.2;
};
name = Release;
};
2D02E4991E0B4A5E006451C7 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
BUNDLE_LOADER = "$(TEST_HOST)";
CLANG_ANALYZER_NONNULL = YES;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_TESTABILITY = YES;
GCC_NO_COMMON_BLOCKS = YES;
INFOPLIST_FILE = "Example-tvOSTests/Info.plist";
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
"$(inherited)",
"\"$(SRCROOT)/$(TARGET_NAME)\"",
);
PRODUCT_BUNDLE_IDENTIFIER = "com.facebook.REACT.Example-tvOSTests";
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = appletvos;
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Example-tvOS.app/Example-tvOS";
TVOS_DEPLOYMENT_TARGET = 10.1;
};
name = Debug;
};
2D02E49A1E0B4A5E006451C7 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
BUNDLE_LOADER = "$(TEST_HOST)";
CLANG_ANALYZER_NONNULL = YES;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
GCC_NO_COMMON_BLOCKS = YES;
INFOPLIST_FILE = "Example-tvOSTests/Info.plist";
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
"$(inherited)",
"\"$(SRCROOT)/$(TARGET_NAME)\"",
);
PRODUCT_BUNDLE_IDENTIFIER = "com.facebook.REACT.Example-tvOSTests";
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = appletvos;
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Example-tvOS.app/Example-tvOS";
TVOS_DEPLOYMENT_TARGET = 10.1;
};
name = Release;
};
83CBBA201A601CBA00E9B192 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
@ -965,7 +1320,6 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
HEADER_SEARCH_PATHS = "";
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
@ -1001,7 +1355,6 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
HEADER_SEARCH_PATHS = "";
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
@ -1030,6 +1383,24 @@
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
2D02E4BA1E0B4A5E006451C7 /* Build configuration list for PBXNativeTarget "Example-tvOS" */ = {
isa = XCConfigurationList;
buildConfigurations = (
2D02E4971E0B4A5E006451C7 /* Debug */,
2D02E4981E0B4A5E006451C7 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
2D02E4BB1E0B4A5E006451C7 /* Build configuration list for PBXNativeTarget "Example-tvOSTests" */ = {
isa = XCConfigurationList;
buildConfigurations = (
2D02E4991E0B4A5E006451C7 /* Debug */,
2D02E49A1E0B4A5E006451C7 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
83CBB9FA1A601CBA00E9B192 /* Build configuration list for PBXProject "Example" */ = {
isa = XCConfigurationList;
buildConfigurations = (


@ -0,0 +1,129 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "0820"
version = "1.3">
<BuildAction
parallelizeBuildables = "NO"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "2D2A28121D9B038B00D4039D"
BuildableName = "libReact.a"
BlueprintName = "React-tvOS"
ReferencedContainer = "container:../node_modules/react-native/React/React.xcodeproj">
</BuildableReference>
</BuildActionEntry>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "2D02E47A1E0B4A5D006451C7"
BuildableName = "Example-tvOS.app"
BlueprintName = "Example-tvOS"
ReferencedContainer = "container:Example.xcodeproj">
</BuildableReference>
</BuildActionEntry>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "NO"
buildForArchiving = "NO"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "2D02E48F1E0B4A5D006451C7"
BuildableName = "Example-tvOSTests.xctest"
BlueprintName = "Example-tvOSTests"
ReferencedContainer = "container:Example.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
<TestableReference
skipped = "NO">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "2D02E48F1E0B4A5D006451C7"
BuildableName = "Example-tvOSTests.xctest"
BlueprintName = "Example-tvOSTests"
ReferencedContainer = "container:Example.xcodeproj">
</BuildableReference>
</TestableReference>
</Testables>
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "2D02E47A1E0B4A5D006451C7"
BuildableName = "Example-tvOS.app"
BlueprintName = "Example-tvOS"
ReferencedContainer = "container:Example.xcodeproj">
</BuildableReference>
</MacroExpansion>
<AdditionalOptions>
</AdditionalOptions>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "2D02E47A1E0B4A5D006451C7"
BuildableName = "Example-tvOS.app"
BlueprintName = "Example-tvOS"
ReferencedContainer = "container:Example.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
<AdditionalOptions>
</AdditionalOptions>
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "2D02E47A1E0B4A5D006451C7"
BuildableName = "Example-tvOS.app"
BlueprintName = "Example-tvOS"
ReferencedContainer = "container:Example.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>


@ -18,7 +18,7 @@
{
NSURL *jsCodeLocation;
jsCodeLocation = [[RCTBundleURLProvider sharedSettings] jsBundleURLForBundleRoot:@"index.ios" fallbackResource:nil];
jsCodeLocation = [[RCTBundleURLProvider sharedSettings] jsBundleURLForBundleRoot:@"index" fallbackResource:nil];
RCTRootView *rootView = [[RCTRootView alloc] initWithBundleURL:jsCodeLocation
moduleName:@"Example"


@ -1,15 +1,5 @@
{
"images" : [
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "29x29",
@ -39,6 +29,11 @@
"idiom" : "iphone",
"size" : "60x60",
"scale" : "3x"
},
{
"idiom" : "ios-marketing",
"size" : "1024x1024",
"scale" : "1x"
}
],
"info" : {


@ -0,0 +1,6 @@
{
"info" : {
"version" : 1,
"author" : "xcode"
}
}


@ -4,10 +4,12 @@
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleDisplayName</key>
<string>Example</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>org.reactjs.native.example.$(PRODUCT_NAME:rfc1034identifier)</string>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
@ -22,12 +24,25 @@
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>NSAppTransportSecurity</key>
<dict>
<key>NSExceptionDomains</key>
<dict>
<key>localhost</key>
<dict>
<key>NSExceptionAllowsInsecureHTTPLoads</key>
<true/>
</dict>
</dict>
</dict>
<key>NSCameraUsageDescription</key>
<string>This app needs access to the camera to take photos.</string>
<key>NSLocationWhenInUseUsageDescription</key>
<string>This app needs access to the location to geolocalize your captures.</string>
<string></string>
<key>NSMicrophoneUsageDescription</key>
<string>This app needs access to the microphone to take videos.</string>
<key>NSPhotoLibraryAddUsageDescription</key>
<string>Your message to user when the photo library is accessed for the first time</string>
<key>NSPhotoLibraryUsageDescription</key>
<string>This app needs access to the photo library to save your captures.</string>
<key>UILaunchStoryboardName</key>
@ -44,19 +59,5 @@
</array>
<key>UIViewControllerBasedStatusBarAppearance</key>
<false/>
<key>NSLocationWhenInUseUsageDescription</key>
<string></string>
<key>NSAppTransportSecurity</key>
<!--See http://ste.vn/2015/06/10/configuring-app-transport-security-ios-9-osx-10-11/ -->
<dict>
<key>NSExceptionDomains</key>
<dict>
<key>localhost</key>
<dict>
<key>NSExceptionAllowsInsecureHTTPLoads</key>
<true/>
</dict>
</dict>
</dict>
</dict>
</plist>


@ -37,7 +37,7 @@
- (void)testRendersWelcomeScreen
{
UIViewController *vc = [[[[UIApplication sharedApplication] delegate] window] rootViewController];
UIViewController *vc = [[[RCTSharedApplication() delegate] window] rootViewController];
NSDate *date = [NSDate dateWithTimeIntervalSinceNow:TIMEOUT_SECONDS];
BOOL foundElement = NO;


@ -3,11 +3,22 @@
"version": "0.0.1",
"private": true,
"scripts": {
"start": "node node_modules/react-native/local-cli/cli.js start"
"start": "node node_modules/react-native/local-cli/cli.js start",
"test": "jest",
"clear": "node node_modules/react-native/local-cli/cli.js start --reset-cache"
},
"dependencies": {
"react": "~15.4.0",
"react-native": "~0.40.0",
"react": "16.0.0",
"react-native": "0.51.0",
"react-native-camera": "file:../"
},
"devDependencies": {
"babel-jest": "22.0.4",
"babel-preset-react-native": "4.0.0",
"jest": "22.0.4",
"react-test-renderer": "16.0.0"
},
"jest": {
"preset": "react-native"
}
}
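
With the `jest` preset and the scripts added above, the day-to-day workflow would presumably be:

```sh
npm test        # runs jest via the "test" script
npm run clear   # starts the packager with --reset-cache
```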

4622
Example/yarn.lock Normal file

File diff suppressed because it is too large

379
README.md

@ -1,21 +1,26 @@
# React Native Camera [![Backers on Open Collective](https://opencollective.com/react-native-camera/backers/badge.svg)](#backers) [![Sponsors on Open Collective](https://opencollective.com/react-native-camera/sponsors/badge.svg)](#sponsors) [![npm version](https://badge.fury.io/js/react-native-camera.svg)](http://badge.fury.io/js/react-native-camera) [![npm downloads](https://img.shields.io/npm/dm/react-native-camera.svg)](https://www.npmjs.com/package/react-native-camera)
# React Native Camera [![Backers on Open Collective](https://opencollective.com/react-native-camera/backers/badge.svg)](#backers) [![Sponsors on Open Collective](https://opencollective.com/react-native-camera/sponsors/badge.svg)](#sponsors) [![npm version](https://badge.fury.io/js/react-native-camera.svg)](http://badge.fury.io/js/react-native-camera) [![Gitter](https://badges.gitter.im/lwansbrough/react-native-camera.svg)](https://gitter.im/lwansbrough/react-native-camera)
The comprehensive camera module for React Native. Including photographs, videos, face detection and barcode scanning!
The comprehensive camera module for React Native. Including photographs, videos, and barcode scanning!
`import { RNCamera, FaceDetector } from 'react-native-camera';`
#### How to use the master branch?
Inside your package.json, use this
`"react-native-camera": "git+https://git@github.com/react-native-community/react-native-camera"`
instead of `"react-native-camera": "^1.0.0"`.
### Contributing
- Pull Requests are welcome, if you open a pull request we will do our best to get to it in a timely manner
- Pull Request Reviews are even more welcome! We need help testing, reviewing, and updating open PRs
- If you are interested in contributing more actively, please contact me (same username on Twitter, Facebook, etc.) Thanks!
- We are now on [Open Collective](https://opencollective.com/react-native-camera#sponsor)! Contributions are appreciated and will be used to fund core contributors. [more details](#open-collective)
- If you want to help us coding, join the Expo Slack https://slack.expo.io/, so we can chat over there. (#react-native-camera)
#### Breaking Changes
##### android build tools has been bumped to 25.0.2, please update (can be done via the android cli or Android Studio)
##### react-native header imports have changed in v0.40, and that means breaking changes for all! [Reference PR & Discussion](https://github.com/lwansbrough/react-native-camera/pull/544).
- if on react-native < 0.40: `npm i react-native-camera@0.4`
- if on react-native >= 0.40: `npm i react-native-camera@0.6`
##### Permissions
To use the camera on Android you must ask for camera permission:
```xml
<uses-permission android:name="android.permission.CAMERA" />
```
To enable `video recording` feature you have to add the following code to the `AndroidManifest.xml`:
```xml
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
@ -26,11 +31,21 @@ To enable `video recording` feature you have to add the following code to the `A
![5j2jduk](https://cloud.githubusercontent.com/assets/2302315/22190752/6bc6ccd0-e0da-11e6-8e2f-6f22a3567a57.gif)
## Migrating from RCTCamera to RNCamera
See this [doc](./docs/migration.md)
### RNCamera Docs
[RNCamera](./docs/RNCamera.md)
### Docs old RCTCamera
[RCTCamera](./docs/RCTCamera.md)
## Getting started
### Requirements
1. JDK >= 1.7 (if you run on 1.6 you will get an error on "_cameras = new HashMap<>();")
2. With iOS 10 and higher you need to add the "Privacy - Camera Usage Description" key to the info.plist of your project. This should be found in 'your_project/ios/your_project/Info.plist'. Add the following code:
2. With iOS 10 and higher you need to add the "Privacy - Camera Usage Description" key to the Info.plist of your project. This should be found in 'your_project/ios/your_project/Info.plist'. Add the following code:
```
<key>NSCameraUsageDescription</key>
<string>Your message to user when the camera is accessed for the first time</string>
@ -41,7 +56,7 @@ To enable `video recording` feature you have to add the following code to the `A
<!-- Include this only if you are planning to use the microphone for video recording -->
<key>NSMicrophoneUsageDescription</key>
<string>Your message to user when the microsphone is accessed for the first time</string>
<string>Your message to user when the microphone is accessed for the first time</string>
```
3. On Android, you require `buildToolsVersion` of `25.0.2+`. _This should easily and automatically be downloaded by Android Studio's SDK Manager._
@ -52,8 +67,6 @@ To enable `video recording` feature you have to add the following code to the `A
<string>Your message to user when the photo library is accessed for the first time</string>
```
NSPhotoLibraryAddUsageDescription
### Mostly automatic install with react-native
1. `npm install react-native-camera --save`
3. `react-native link react-native-camera`
@ -70,17 +83,61 @@ pod 'react-native-camera', path: '../node_modules/react-native-camera'
#### iOS
1. `npm install react-native-camera --save`
2. In XCode, in the project navigator, right click `Libraries` ➜ `Add Files to [your project's name]`
3. Go to `node_modules` ➜ `react-native-camera` and add `RCTCamera.xcodeproj`
4. In XCode, in the project navigator, select your project. Add `libRCTCamera.a` to your project's `Build Phases` ➜ `Link Binary With Libraries`
5. Click `RCTCamera.xcodeproj` in the project navigator and go to the `Build Settings` tab. Make sure 'All' is toggled on (instead of 'Basic'). In the `Search Paths` section, look for `Header Search Paths` and make sure it contains both `$(SRCROOT)/../../react-native/React` and `$(SRCROOT)/../../../React` - mark both as `recursive`.
6. Run your project (`Cmd+R`)
3. Go to `node_modules` ➜ `react-native-camera` and add `RNCamera.xcodeproj`
4. In XCode, in the project navigator, select your project. Add `libRNCamera.a` to your project's `Build Phases` ➜ `Link Binary With Libraries`
5. Click `RNCamera.xcodeproj` in the project navigator and go to the `Build Settings` tab. Make sure 'All' is toggled on (instead of 'Basic'). In the `Search Paths` section, look for `Header Search Paths` and make sure it contains both `$(SRCROOT)/../../react-native/React` and `$(SRCROOT)/../../../React` - mark both as `recursive`.
### Face Detection Steps
Face Detection is optional on iOS. If you want it, you will need to install the Google Mobile Vision frameworks in your project, as described in the next section.
##### No Face Detection steps
If you do not need it and do not want to install the GMV frameworks, open your app's Xcode project, expand the RNCamera project in the Project Navigator, right click on the FaceDetector folder and delete it (move to trash, if you want). If you keep that folder and do not follow the GMV installation steps, your project will not compile.
If you want to make this automatic, you can add a postinstall script to your app `package.json`. Inside the `postinstall_project` directory there is an Xcode project ready with the folder removed (we opened Xcode, removed the folder from the project and copied the resulting project file). The post install script is:
```
#!/bin/bash
echo "Creating project without FaceDetector"
if [ -e node_modules/react-native-camera/ios/FaceDetector ] ; then
rm -rf node_modules/react-native-camera/ios/FaceDetector
fi
cp node_modules/react-native-camera/postinstall_project/projectWithoutFaceDetection.pbxproj node_modules/react-native-camera/ios/RNCamera.xcodeproj/project.pbxproj
```
And add something like this to the `scripts` section in your `package.json`:
```
"postinstall": "./scripts/post.sh",
```
##### Installing GMV frameworks
GMV (Google Mobile Vision) is used for face detection by the iOS RNCamera. You have to link the Google frameworks to your project to successfully compile the RNCamera project.
1. Download:
Google Symbol Utilities: https://www.gstatic.com/cpdc/dbffca986f6337f8-GoogleSymbolUtilities-1.1.1.tar.gz
Google Utilities: https://dl.google.com/dl/cpdc/978f81964b50a7c0/GoogleUtilities-1.3.2.tar.gz
Google Mobile Vision: https://dl.google.com/dl/cpdc/df83c97cbca53eaf/GoogleMobileVision-1.1.0.tar.gz
Google Networking Utilities: https://dl.google.com/dl/cpdc/54fd7b7ef8fd3edc/GoogleNetworkingUtilities-1.2.2.tar.gz
Google Interchange Utilities: https://dl.google.com/dl/cpdc/1a7f7ba905b2c029/GoogleInterchangeUtilities-1.2.2.tar.gz
2. Extract everything to one folder. Delete "BarcodeDetector" and "copy" folders from Google Mobile Vision.
3. Open Xcode, right click on your project and choose "New Group". Rename the new group to "Frameworks". Right click on "Frameworks" and select "Add files to 'YOUR_PROJECT'". Select all the content from the folder of step 2 and click on Options. Select "Copy items if needed", leave "Create groups" selected and choose all your targets in the "Add to targets" section. Then click on "Add".
4. On your target -> Build Phases -> Link Binary with Libraries -> add AddressBook.framework
5. On your target -> Build Settings -> Other Linker Flags -> add -lz, -ObjC and -lc++
6. To force indexing and prevent errors, restart Xcode and reopen your project before compiling.
#### Android
1. `npm install react-native-camera --save`
2. Open up `android/app/src/main/java/[...]/MainApplication.java`
- Add `import com.lwansbrough.RCTCamera.RCTCameraPackage;` to the imports at the top of the file
- Add `new RCTCameraPackage()` to the list returned by the `getPackages()` method. Add a comma to the previous item if there's already something there.
- Add `import org.reactnative.camera.RNCameraPackage;` to the imports at the top of the file
- Add `new RNCameraPackage()` to the list returned by the `getPackages()` method. Add a comma to the previous item if there's already something there.
3. Append the following lines to `android/settings.gradle`:
@ -92,7 +149,12 @@ pod 'react-native-camera', path: '../node_modules/react-native-camera'
4. Insert the following lines inside the dependencies block in `android/app/build.gradle`:
```gradle
compile project(':react-native-camera')
compile (project(':react-native-camera')) {
exclude group: "com.google.android.gms"
}
compile ("com.google.android.gms:play-services-vision:10.2.0") {
force = true;
}
```
5. Declare the permissions in your Android Manifest (required for `video recording` feature)
@ -102,279 +164,26 @@ pod 'react-native-camera', path: '../node_modules/react-native-camera'
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
```
6. Add jitpack to `android/build.gradle`

```gradle
allprojects {
    repositories {
        maven { url "https://jitpack.io" }
    }
}
```

## Usage

All you need is to `require` the `react-native-camera` module and then use the
`<Camera/>` tag.

```javascript
'use strict';
import React, { Component } from 'react';
import {
  AppRegistry,
  Dimensions,
  StyleSheet,
  Text,
  TouchableHighlight,
  View
} from 'react-native';
import Camera from 'react-native-camera';

class BadInstagramCloneApp extends Component {
  render() {
    return (
      <View style={styles.container}>
        <Camera
          ref={(cam) => {
            this.camera = cam;
          }}
          style={styles.preview}
          aspect={Camera.constants.Aspect.fill}>
          <Text style={styles.capture} onPress={this.takePicture.bind(this)}>[CAPTURE]</Text>
        </Camera>
      </View>
    );
  }

  takePicture() {
    const options = {};
    //options.location = ...
    this.camera.capture({metadata: options})
      .then((data) => console.log(data))
      .catch(err => console.error(err));
  }
}

const styles = StyleSheet.create({
  container: {
    flex: 1,
    flexDirection: 'row',
  },
  preview: {
    flex: 1,
    justifyContent: 'flex-end',
    alignItems: 'center'
  },
  capture: {
    flex: 0,
    backgroundColor: '#fff',
    borderRadius: 5,
    color: '#000',
    padding: 10,
    margin: 40
  }
});

AppRegistry.registerComponent('BadInstagramCloneApp', () => BadInstagramCloneApp);
```
Follow the [Q & A](./docs/QA.md) section if you are having compilation issues.

## Usage

### RNCamera

Take a look into this [documentation](./docs/RNCamera.md).
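For a quick taste before the full docs, here is a minimal sketch assuming the v1 `RNCamera` API (`RNCamera.Constants` and `takePictureAsync`, as described in [RNCamera.md](./docs/RNCamera.md)); the option values shown are illustrative:

```javascript
import React, { Component } from 'react';
import { View, TouchableOpacity, Text } from 'react-native';
import { RNCamera } from 'react-native-camera';

class CameraScreen extends Component {
  takePicture = async () => {
    if (this.camera) {
      // quality/base64 are illustrative takePictureAsync options
      const data = await this.camera.takePictureAsync({ quality: 0.5, base64: true });
      console.log(data.uri);
    }
  };

  render() {
    return (
      <View style={{ flex: 1 }}>
        <RNCamera
          ref={(ref) => { this.camera = ref; }}
          style={{ flex: 1 }}
          type={RNCamera.Constants.Type.back}
          flashMode={RNCamera.Constants.FlashMode.auto}
        />
        <TouchableOpacity onPress={this.takePicture}>
          <Text>SNAP</Text>
        </TouchableOpacity>
      </View>
    );
  }
}
```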
## Properties

#### `aspect`

Values: `Camera.constants.Aspect.fit` or `"fit"`, `Camera.constants.Aspect.fill` or `"fill"` (default), `Camera.constants.Aspect.stretch` or `"stretch"`

The `aspect` property allows you to define how your viewfinder renders the camera's view. For instance, if you have a square viewfinder and you want to fill it entirely, you have two options: `"fill"`, where the aspect ratio of the camera's view is preserved by cropping the view, or `"stretch"`, where the aspect ratio is skewed in order to fit the entire image inside the viewfinder. The other option is `"fit"`, which ensures the camera's entire view fits inside your viewfinder without altering the aspect ratio.
#### `iOS` `audio`
Values: `true` (Boolean), `false` (default)
*Applies to video capture mode only.* Specifies whether or not audio should be captured with the video.
#### `captureMode`
Values: `Camera.constants.CaptureMode.still` (default), `Camera.constants.CaptureMode.video`
The type of capture that will be performed by the camera - either a still image or video.
#### `captureTarget`
Values: `Camera.constants.CaptureTarget.cameraRoll` (default), `Camera.constants.CaptureTarget.disk`, `Camera.constants.CaptureTarget.temp`, ~~`Camera.constants.CaptureTarget.memory`~~ (deprecated),
This property allows you to specify the target output of the captured image data. The disk output has been shown to improve capture response time, so that is the recommended value. When using the deprecated memory output, the image binary is sent back as a base64-encoded string.
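For example (a sketch against the `capture` API shown in the usage example above):

```javascript
// Write straight to disk (the recommended target) instead of the camera roll
this.camera.capture({ target: Camera.constants.CaptureTarget.disk })
  .then((data) => console.log(data.path))
  .catch(err => console.error(err));
```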
#### `captureQuality`
Values: `Camera.constants.CaptureQuality.high` or `"high"` (default), `Camera.constants.CaptureQuality.medium` or `"medium"`, `Camera.constants.CaptureQuality.low` or `"low"`, `Camera.constants.CaptureQuality.photo` or `"photo"`, `Camera.constants.CaptureQuality["1080p"]` or `"1080p"`, `Camera.constants.CaptureQuality["720p"]` or `"720p"`, `Camera.constants.CaptureQuality["480p"]` or `"480p"`.
This property allows you to specify the quality output of the captured image or video. By default the quality is set to high.
When choosing more-specific quality settings (1080p, 720p, 480p), note that each platform and device supports different valid picture/video sizes, and the actual resolution within each of these quality settings might differ. There should not be too much variance (if any) for iOS; 1080p should give 1920x1080, 720p should give 1280x720, and 480p should give 640x480 (note that iOS 480p is therefore NOT the typical 16:9 HD aspect ratio, and the typically-HD camera preview screen may differ greatly in aspect from what you actually record!). For Android, expect more variance: on most Androids, 1080p *should* give 1920x1080 and 720p *should* give 1280x720; however, 480p will at "best" be 853x480 (16:9 HD aspect ratio), but falls back to 800x480, 720x480, or "worse", depending on what is closest-but-less-than 853x480 and available on the actual device. If your application requires knowledge of the precise resolution of the output image/video, you might consider manually determining the actual resolution after capture has completed (particularly for 480p on Android).
Android also supports `Camera.constants.CaptureQuality.preview` or `"preview"` which matches the output image to the same one used in the preview
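For example, requesting 720p (a sketch; as noted above, on Android the actual size may be the closest supported match):

```javascript
<Camera
  ref={(cam) => { this.camera = cam; }}
  style={styles.preview}
  captureQuality={Camera.constants.CaptureQuality['720p']}
/>
```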
#### `type`
Values: `Camera.constants.Type.front` or `"front"`, `Camera.constants.Type.back` or `"back"` (default)
Use the `type` property to specify which camera to use.
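A sketch switching cameras from component state (`this.state.useFront` is illustrative):

```javascript
<Camera
  style={styles.preview}
  type={this.state.useFront ? Camera.constants.Type.front : Camera.constants.Type.back}
/>
```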
#### `orientation`
Values:
`Camera.constants.Orientation.auto` or `"auto"` (default),
`Camera.constants.Orientation.landscapeLeft` or `"landscapeLeft"`, `Camera.constants.Orientation.landscapeRight` or `"landscapeRight"`, `Camera.constants.Orientation.portrait` or `"portrait"`, `Camera.constants.Orientation.portraitUpsideDown` or `"portraitUpsideDown"`
The `orientation` property allows you to specify the current orientation of the phone to ensure the viewfinder is "the right way up."
#### `Android` `playSoundOnCapture`
Values: `true` (default) or `false`
This property allows you to specify whether a shutter sound is played on capture. It is currently Android-only, pending [a reasonable mute implementation](http://stackoverflow.com/questions/4401232/avfoundation-how-to-turn-off-the-shutter-sound-when-capturestillimageasynchrono) in iOS.
#### `onBarCodeRead`
Will call the specified method when a barcode is detected in the camera's view.
Event contains `data` (the data in the barcode) and `bounds` (the rectangle which outlines the barcode.)
The following barcode types can be recognised:
- `aztec`
- `code128`
- `code39`
- `code39mod43`
- `code93`
- `ean13`
- `ean8`
- `pdf417`
- `qr`
- `upce`
- `interleaved2of5` (when available)
- `itf14` (when available)
- `datamatrix` (when available)
The barcode type is provided in the `data` object.
#### `barCodeTypes`
An array of barcode types to search for. Defaults to all types listed above. No effect if `onBarCodeRead` is undefined.
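A sketch combining both props (the type strings are those listed above):

```javascript
<Camera
  style={styles.preview}
  barCodeTypes={['qr', 'ean13']}
  onBarCodeRead={(e) => console.log(e.data, e.bounds)}
/>
```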
#### `flashMode`
Values:
`Camera.constants.FlashMode.on`,
`Camera.constants.FlashMode.off`,
`Camera.constants.FlashMode.auto`
Use the `flashMode` property to specify the camera flash mode.
#### `torchMode`
Values:
`Camera.constants.TorchMode.on`,
`Camera.constants.TorchMode.off`,
`Camera.constants.TorchMode.auto`
Use the `torchMode` property to specify the camera torch mode.
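A sketch driving both from state (`this.state.torchOn` is illustrative):

```javascript
<Camera
  style={styles.preview}
  flashMode={Camera.constants.FlashMode.auto}
  torchMode={this.state.torchOn ? Camera.constants.TorchMode.on : Camera.constants.TorchMode.off}
/>
```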
#### `iOS` `onFocusChanged: Event { nativeEvent: { touchPoint: { x, y } } }`
iOS: Called when a touch focus gesture has been made.
By default, `onFocusChanged` is not defined and tap-to-focus is disabled.
Android: This callback is not yet implemented. However, Android will
automatically do tap-to-focus if the device supports auto-focus; there is
currently no way to manage this from javascript.
#### `iOS` `defaultOnFocusComponent`
Values:
`true` (default)
`false`
If `defaultOnFocusComponent` set to false, default internal implementation of visual feedback for tap-to-focus gesture will be disabled.
#### `iOS` `onZoomChanged: Event { nativeEvent: { velocity, zoomFactor } }`
iOS: Called when the zoom factor has changed.
By default, `onZoomChanged` is not defined and pinch-to-zoom is disabled.
Android: This callback is not yet implemented. However, Android will
automatically handle pinch-to-zoom; there is currently no way to manage this
from javascript.
#### `iOS` `keepAwake`
If set to `true`, the device will not sleep while the camera preview is visible. This mimics the behavior of the default camera app, which keeps the device awake while open.
#### `mirrorImage`
If set to `true`, the image returned will be mirrored.
#### `fixOrientation` (_deprecated_)
If set to `true`, the image returned will be rotated to the _right way up_. WARNING: It uses a significant amount of memory and may cause your application to crash if the device cannot provide enough RAM to perform the rotation.
(_If you find that you need to use this option because your images are incorrectly oriented by default,
could you please submit a PR and include the make and model of the device? We believe this
functionality is no longer required and would like to remove it._)
## Component instance methods
You can access component methods by adding a `ref` (i.e. `ref="camera"`) prop to your `<Camera>` element; then you can use `this.refs.camera.capture(cb)`, etc. inside your component.
#### `capture([options]): Promise`
Captures data from the camera. What is captured is based on the `captureMode` and `captureTarget` props. `captureMode` tells the camera whether you want a still image or video. `captureTarget` allows you to specify how you want the data to be captured and sent back to you. See `captureTarget` under Properties to see the available values.
Supported options:
- `audio` (See `captureAudio` under Properties)
- `mode` (See `captureMode` under Properties)
- `target` (See `captureTarget` under Properties)
- `metadata` This is metadata to be added to the captured image.
- `location` This is the object returned from `navigator.geolocation.getCurrentPosition()` (React Native's geolocation polyfill). It will add GPS metadata to the image.
- `rotation` This will rotate the image by the number of degrees specified.
- `jpegQuality` (integer between 1 and 100) This property is used to compress the output jpeg file with 100% meaning no jpeg compression will be applied.
- `totalSeconds` This will limit video length by number of seconds specified. Only works in video capture mode.
The promise will be fulfilled with an object with some of the following properties:
- `data`: Returns a base64-encoded string with the capture data (only returned with the deprecated `Camera.constants.CaptureTarget.memory`)
- `path`: Returns the path of the captured image or video file on disk
- `width`: (currently iOS video only) returns the video file's frame width
- `height`: (currently iOS video only) returns the video file's frame height
- `duration`: (currently iOS video only) video file duration
- `size`: (currently iOS video only) video file size (in bytes)
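Putting a few of the documented options together (a sketch):

```javascript
this.camera.capture({
  mode: Camera.constants.CaptureMode.still,
  target: Camera.constants.CaptureTarget.disk,
  jpegQuality: 80, // 1-100, 100 = no jpeg compression
})
  .then((data) => console.log(data.path))
  .catch(err => console.error(err));
```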
#### `iOS` `getFOV(): Promise`
Returns the camera's current field of view.
#### `hasFlash(): Promise`
Returns whether or not the camera has flash capabilities.
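A sketch querying both (remember `getFOV` is iOS-only):

```javascript
Promise.all([this.camera.getFOV(), this.camera.hasFlash()])
  .then(([fov, hasFlash]) => console.log(fov, hasFlash));
```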
#### `stopCapture()`
Ends the current capture session for video captures. Only applies when the current `captureMode` is `video`.
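A sketch of a bounded video recording (option names as documented above):

```javascript
// Start recording; the promise resolves once the capture ends
this.camera.capture({
  mode: Camera.constants.CaptureMode.video,
  audio: true,
  totalSeconds: 30, // hard stop as a safety net
})
  .then((data) => console.log(data.path))
  .catch(err => console.error(err));

// ...later, e.g. in a "stop" button handler:
this.camera.stopCapture();
```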
## Component static methods
#### `iOS` `Camera.checkDeviceAuthorizationStatus(): Promise`
Exposes the native API for checking if the device has authorized access to the camera (camera and microphone permissions). Can be used to call before loading the Camera component to ensure proper UX. The promise will be fulfilled with `true` or `false` depending on whether the device is authorized. Note, [as of iOS 10](https://developer.apple.com/library/content/documentation/AudioVideo/Conceptual/PhotoCaptureGuide/#//apple_ref/doc/uid/TP40017511-CH1-DontLinkElementID_3), you will need to add `NSCameraUsageDescription` and `NSMicrophoneUsageDescription` to your XCode project's Info.plist file or you might experience a crash.
#### `iOS` `Camera.checkVideoAuthorizationStatus(): Promise`
The same as `Camera.checkDeviceAuthorizationStatus()` but only checks the camera permission. Note, as of iOS 10, you will need to add `NSCameraUsageDescription` to your XCode project's Info.plist file or you might experience a crash.
#### `iOS` `Camera.checkAudioAuthorizationStatus(): Promise`
The same as `Camera.checkDeviceAuthorizationStatus()` but only checks the microphone permission. Note, as of iOS 10, you will need to add `NSMicrophoneUsageDescription` to your XCode project's Info.plist file or you might experience a crash.
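A sketch gating the camera UI on authorization (the state handling is illustrative):

```javascript
Camera.checkDeviceAuthorizationStatus()
  .then((authorized) => this.setState({ showCamera: authorized }));
```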
## Subviews
This component supports subviews, so if you wish to use the camera view as a background or if you want to layout buttons/images/etc. inside the camera then you can do that.
## Example
To see more of the `react-native-camera` in action, you can check out the source in [Example](https://github.com/lwansbrough/react-native-camera/tree/master/Example) folder.
### RCTCamera
Since `1.0.0`, RCTCamera is deprecated, but if you want to use it, you can see its [documentation](./docs/RCTCamera.md).
## Open Collective
We are just beginning a funding campaign for react-native-camera. Contributions are greatly appreciated. When we gain more than $250 we will begin distributing funds to core maintainers in a fully transparent manner. Feedback for this process is welcomed; we will continue to evolve the strategy as we grow and learn more.

246
THIRD-PARTY-LICENSES Normal file
View File

@ -0,0 +1,246 @@
===============================================================================
expo/expo
https://github.com/expo/expo
-------------------------------------------------------------------------------
BSD License
For Exponent software
Copyright (c) 2015-present, 650 Industries, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the names 650 Industries, Exponent, nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
===============================================================================
google/cameraview
https://github.com/google/cameraview
-------------------------------------------------------------------------------
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -1,24 +1,27 @@
buildscript {
repositories {
jcenter()
maven {
url 'https://maven.google.com'
}
}
dependencies {
classpath 'com.android.tools.build:gradle:1.2.3'
classpath 'com.android.tools.build:gradle:3.0.0'
}
}
apply plugin: 'com.android.library'
android {
compileSdkVersion 25
buildToolsVersion "25.0.2"
compileSdkVersion 26
buildToolsVersion "26.0.2"
defaultConfig {
minSdkVersion 16
targetSdkVersion 22
targetSdkVersion 26
versionCode 1
versionName "1.0"
versionName "1.0.0"
}
lintOptions {
abortOnError false
@ -28,10 +31,18 @@ android {
repositories {
mavenCentral()
maven {
url 'https://maven.google.com'
}
maven { url "https://jitpack.io" }
}
dependencies {
compile "com.facebook.react:react-native:0.19.+"
compile 'com.facebook.react:react-native:+'
compile "com.google.zxing:core:3.2.1"
compile "com.drewnoakes:metadata-extractor:2.9.1"
compile 'com.google.android.gms:play-services-vision:+'
compile "com.android.support:exifinterface:+"
compile 'com.github.react-native-community:cameraview:d5d9b0d925494ef451ce3eef3fdf14cc874d9baa'
}

Binary file not shown.

View File

@ -0,0 +1,6 @@
#Sun Dec 31 13:43:56 BRST 2017
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-4.1-all.zip

160
android/gradlew vendored Executable file
View File

@ -0,0 +1,160 @@
#!/usr/bin/env bash
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn ( ) {
echo "$*"
}
die ( ) {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
esac
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
function splitJvmOpts() {
JVM_OPTS=("$@")
}
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"

90
android/gradlew.bat vendored Normal file
View File

@ -0,0 +1,90 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windowz variants
if not "%OS%" == "Windows_NT" goto win9xME_args
if "%@eval[2+2]" == "4" goto 4NT_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
goto execute
:4NT_args
@rem Get arguments from the 4NT Shell from JP Software
set CMD_LINE_ARGS=%$
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@ -1,4 +1,4 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="com.lwansbrough.RCTCamera">
<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="org.reactnative.camera">
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" android:required="false" />

View File

@ -1,5 +1,6 @@
package com.lwansbrough.RCTCamera;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
@ -36,6 +37,14 @@ public class MutableImage {
this.currentRepresentation = toBitmap(originalImageData);
}
public int getWidth() {
return this.currentRepresentation.getWidth();
}
public int getHeight() {
return this.currentRepresentation.getHeight();
}
public void mirrorImage() throws ImageMutationFailedException {
Matrix m = new Matrix();
@ -45,8 +54,8 @@ public class MutableImage {
currentRepresentation,
0,
0,
currentRepresentation.getWidth(),
currentRepresentation.getHeight(),
getWidth(),
getHeight(),
m,
false
);
@ -76,6 +85,25 @@ public class MutableImage {
}
}
public void cropToPreview(double previewRatio) throws IllegalArgumentException {
int pictureWidth = getWidth(), pictureHeight = getHeight();
int targetPictureWidth, targetPictureHeight;
if (previewRatio * pictureHeight > pictureWidth) {
targetPictureWidth = pictureWidth;
targetPictureHeight = (int) (pictureWidth / previewRatio);
} else {
targetPictureHeight = pictureHeight;
targetPictureWidth = (int) (pictureHeight * previewRatio);
}
this.currentRepresentation = Bitmap.createBitmap(
this.currentRepresentation,
(pictureWidth - targetPictureWidth) / 2,
(pictureHeight - targetPictureHeight) / 2,
targetPictureWidth,
targetPictureHeight);
}
//see http://www.impulseadventure.com/photo/exif-orientation.html
private void rotate(int exifOrientation) throws ImageMutationFailedException {
final Matrix bitmapMatrix = new Matrix();
@ -114,8 +142,8 @@ public class MutableImage {
currentRepresentation,
0,
0,
currentRepresentation.getWidth(),
currentRepresentation.getHeight(),
getWidth(),
getHeight(),
bitmapMatrix,
false
);

View File

@ -4,6 +4,7 @@
package com.lwansbrough.RCTCamera;
import android.graphics.drawable.GradientDrawable;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.util.Log;
@ -73,6 +74,22 @@ public class RCTCamera {
return cameraInfo.previewHeight;
}
public int getPreviewVisibleHeight(int type) {
CameraInfoWrapper cameraInfo = _cameraInfos.get(type);
if (null == cameraInfo) {
return 0;
}
return cameraInfo.previewVisibleHeight;
}
public int getPreviewVisibleWidth(int type) {
CameraInfoWrapper cameraInfo = _cameraInfos.get(type);
if (null == cameraInfo) {
return 0;
}
return cameraInfo.previewVisibleWidth;
}
public Camera.Size getBestSize(List<Camera.Size> supportedSizes, int maxWidth, int maxHeight) {
Camera.Size bestSize = null;
for (Camera.Size size : supportedSizes) {
@ -123,8 +140,8 @@ public class RCTCamera {
continue;
}
int currentDelta = Math.abs(closestSize.width - matchWidth) * Math.abs(closestSize.height - matchHeight);
int newDelta = Math.abs(size.width - matchWidth) * Math.abs(size.height - matchHeight);
double currentDelta = Math.sqrt(Math.pow(closestSize.width - matchWidth,2) + Math.pow(closestSize.height - matchHeight,2));
double newDelta = Math.sqrt(Math.pow(size.width - matchWidth,2) + Math.pow(size.height - matchHeight,2));
if (newDelta < currentDelta) {
closestSize = size;
@ -345,6 +362,22 @@ public class RCTCamera {
}
}
public void setZoom(int cameraType, int zoom) {
Camera camera = this.acquireCameraInstance(cameraType);
if (null == camera) {
return;
}
Camera.Parameters parameters = camera.getParameters();
int maxZoom = parameters.getMaxZoom();
if (parameters.isZoomSupported()) {
if (zoom >=0 && zoom < maxZoom) {
parameters.setZoom(zoom);
camera.setParameters(parameters);
}
}
}
public void adjustCameraRotationToDeviceOrientation(int type, int deviceOrientation) {
Camera camera = _cameras.get(type);
if (null == camera) {
@ -418,6 +451,16 @@ public class RCTCamera {
}
}
public void setPreviewVisibleSize(int type, int width, int height) {
CameraInfoWrapper cameraInfo = _cameraInfos.get(type);
if (cameraInfo == null) {
return;
}
cameraInfo.previewVisibleWidth = width;
cameraInfo.previewVisibleHeight = height;
}
private RCTCamera(int deviceOrientation) {
_cameras = new HashMap<>();
_cameraInfos = new HashMap<>();
@ -448,6 +491,8 @@ public class RCTCamera {
public int rotation = 0;
public int previewWidth = -1;
public int previewHeight = -1;
public int previewVisibleWidth = -1;
public int previewVisibleHeight = -1;
public CameraInfoWrapper(Camera.CameraInfo info) {
this.info = info;

View File

@ -6,6 +6,7 @@
package com.lwansbrough.RCTCamera;
import android.content.ContentValues;
import android.content.res.Configuration;
import android.hardware.Camera;
import android.media.*;
import android.net.Uri;
@ -73,7 +74,6 @@ public class RCTCameraModule extends ReactContextBaseJavaModule
private static ReactApplicationContext _reactContext;
private RCTSensorOrientationChecker _sensorOrientationChecker;
private MediaActionSound sound = new MediaActionSound();
private MediaRecorder mMediaRecorder;
private long MRStartTime;
@ -88,7 +88,6 @@ public class RCTCameraModule extends ReactContextBaseJavaModule
_reactContext = reactContext;
_sensorOrientationChecker = new RCTSensorOrientationChecker(_reactContext);
_reactContext.addLifecycleEventListener(this);
sound.load(MediaActionSound.SHUTTER_CLICK);
}
public static ReactApplicationContext getReactContextSingleton() {
@ -486,6 +485,11 @@ public class RCTCameraModule extends ReactContextBaseJavaModule
@ReactMethod
public void capture(final ReadableMap options, final Promise promise) {
if (RCTCamera.getInstance() == null) {
promise.reject("Camera is not ready yet.");
return;
}
int orientation = options.hasKey("orientation") ? options.getInt("orientation") : RCTCamera.getInstance().getOrientation();
if (orientation == RCT_CAMERA_ORIENTATION_AUTO) {
_sensorOrientationChecker.onResume();
@ -504,7 +508,7 @@ public class RCTCameraModule extends ReactContextBaseJavaModule
}
private void captureWithOrientation(final ReadableMap options, final Promise promise, int deviceOrientation) {
Camera camera = RCTCamera.getInstance().acquireCameraInstance(options.getInt("type"));
final Camera camera = RCTCamera.getInstance().acquireCameraInstance(options.getInt("type"));
if (null == camera) {
promise.reject("No camera found.");
return;
@ -518,6 +522,7 @@ public class RCTCameraModule extends ReactContextBaseJavaModule
RCTCamera.getInstance().setCaptureQuality(options.getInt("type"), options.getString("quality"));
if (options.hasKey("playSoundOnCapture") && options.getBoolean("playSoundOnCapture")) {
MediaActionSound sound = new MediaActionSound();
sound.play(MediaActionSound.SHUTTER_CLICK);
}
@ -545,9 +550,21 @@ public class RCTCameraModule extends ReactContextBaseJavaModule
}
};
Camera.ShutterCallback shutterCallback = new Camera.ShutterCallback() {
@Override
public void onShutter() {
try {
camera.setPreviewCallback(null);
camera.setPreviewTexture(null);
} catch (Exception e) {
e.printStackTrace();
}
}
};
if(mSafeToCapture) {
try {
camera.takePicture(null, null, captureCallback);
camera.takePicture(shutterCallback, null, captureCallback);
mSafeToCapture = false;
} catch(RuntimeException ex) {
Log.e(TAG, "Couldn't capture photo.", ex);
@ -569,6 +586,25 @@ public class RCTCameraModule extends ReactContextBaseJavaModule
}
}
boolean needsReorient = false;
double previewRatio, pictureRatio = (double) mutableImage.getWidth() / (double) mutableImage.getHeight();
try {
int type = options.getInt("type");
previewRatio = (double) RCTCamera.getInstance().getPreviewVisibleWidth(type) / (double) RCTCamera.getInstance().getPreviewVisibleHeight(type);
needsReorient = (previewRatio > 1) != (pictureRatio > 1);
} catch (IllegalArgumentException e) {
previewRatio = pictureRatio;
}
boolean shouldCropToPreview = options.hasKey("cropToPreview") && options.getBoolean("cropToPreview");
if (shouldCropToPreview) {
try {
mutableImage.cropToPreview(needsReorient ? 1.0 / previewRatio : previewRatio);
} catch (IllegalArgumentException e) {
promise.reject("Error cropping image to preview", e);
}
}
boolean shouldMirror = options.hasKey("mirrorImage") && options.getBoolean("mirrorImage");
if (shouldMirror) {
try {
@ -583,11 +619,16 @@ public class RCTCameraModule extends ReactContextBaseJavaModule
jpegQualityPercent = options.getInt("jpegQuality");
}
int imgWidth = (needsReorient) ? mutableImage.getHeight() : mutableImage.getWidth();
int imgHeight = (needsReorient) ? mutableImage.getWidth() : mutableImage.getHeight();
switch (options.getInt("target")) {
case RCT_CAMERA_CAPTURE_TARGET_MEMORY:
String encoded = mutableImage.toBase64(jpegQualityPercent);
WritableMap response = new WritableNativeMap();
response.putString("data", encoded);
response.putInt("width", imgWidth);
response.putInt("height", imgHeight);
promise.resolve(response);
break;
case RCT_CAMERA_CAPTURE_TARGET_CAMERA_ROLL: {
@ -606,7 +647,7 @@ public class RCTCameraModule extends ReactContextBaseJavaModule
addToMediaStore(cameraRollFile.getAbsolutePath());
resolveImage(cameraRollFile, promise, true);
resolveImage(cameraRollFile, imgWidth, imgHeight, promise, true);
break;
}
@ -618,13 +659,13 @@ public class RCTCameraModule extends ReactContextBaseJavaModule
}
try {
mutableImage.writeDataToFile(pictureFile, options, 85);
mutableImage.writeDataToFile(pictureFile, options, jpegQualityPercent);
} catch (IOException e) {
promise.reject("failed to save image file", e);
return;
}
resolveImage(pictureFile, promise, false);
resolveImage(pictureFile, imgWidth, imgHeight, promise, false);
break;
}
@ -636,13 +677,13 @@ public class RCTCameraModule extends ReactContextBaseJavaModule
}
try {
mutableImage.writeDataToFile(tempFile, options, 85);
mutableImage.writeDataToFile(tempFile, options, jpegQualityPercent);
} catch (IOException e) {
promise.reject("failed to save image file", e);
return;
}
resolveImage(tempFile, promise, false);
resolveImage(tempFile, imgWidth, imgHeight, promise, false);
break;
}
@ -670,6 +711,24 @@ public class RCTCameraModule extends ReactContextBaseJavaModule
promise.resolve(null != flashModes && !flashModes.isEmpty());
}
@ReactMethod
public void setZoom(ReadableMap options, int zoom) {
RCTCamera instance = RCTCamera.getInstance();
if (instance == null) return;
Camera camera = instance.acquireCameraInstance(options.getInt("type"));
if (camera == null) return;
Camera.Parameters parameters = camera.getParameters();
int maxZoom = parameters.getMaxZoom();
if (parameters.isZoomSupported()) {
if (zoom >=0 && zoom < maxZoom) {
parameters.setZoom(zoom);
camera.setParameters(parameters);
}
}
}
private File getOutputMediaFile(int type) {
// Get environment directory type id from requested media type.
String environmentDirectoryType;
@ -765,9 +824,11 @@ public class RCTCameraModule extends ReactContextBaseJavaModule
// ... do nothing
}
private void resolveImage(final File imageFile, final Promise promise, boolean addToMediaStore) {
private void resolveImage(final File imageFile, final int imgWidth, final int imgHeight, final Promise promise, boolean addToMediaStore) {
final WritableMap response = new WritableNativeMap();
response.putString("path", Uri.fromFile(imageFile).toString());
response.putInt("width", imgWidth);
response.putInt("height", imgHeight);
if(addToMediaStore) {
// borrowed from react-native CameraRollManager, it finds and returns the 'internal'

View File

@ -1,31 +0,0 @@
package com.lwansbrough.RCTCamera;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import com.facebook.react.ReactPackage;
import com.facebook.react.bridge.NativeModule;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.uimanager.ViewManager;
import com.facebook.react.bridge.JavaScriptModule;
public class RCTCameraPackage implements ReactPackage {
@Override
public List<NativeModule> createNativeModules(ReactApplicationContext reactApplicationContext) {
return Collections.<NativeModule>singletonList(new RCTCameraModule(reactApplicationContext));
}
// Deprecated in RN 0.47
public List<Class<? extends JavaScriptModule>> createJSModules() {
return Collections.emptyList();
}
@Override
public List<ViewManager> createViewManagers(ReactApplicationContext reactApplicationContext) {
//noinspection ArraysAsListWithZeroOrOneArgument
return Collections.<ViewManager>singletonList(new RCTCameraViewManager());
}
}

View File

@ -23,6 +23,8 @@ public class RCTCameraView extends ViewGroup {
private String _captureQuality = "high";
private int _torchMode = -1;
private int _flashMode = -1;
private int _zoom = 0;
private boolean _clearWindowBackground = false;
public RCTCameraView(Context context) {
super(context);
@ -76,6 +78,10 @@ public class RCTCameraView extends ViewGroup {
if (-1 != this._torchMode) {
_viewFinder.setTorchMode(this._torchMode);
}
if (0 != this._zoom) {
_viewFinder.setZoom(this._zoom);
}
_viewFinder.setClearWindowBackground(this._clearWindowBackground);
addView(_viewFinder);
}
}
@ -108,6 +114,13 @@ public class RCTCameraView extends ViewGroup {
}
}
public void setZoom(int zoom) {
this._zoom = zoom;
if (this._viewFinder != null) {
this._viewFinder.setZoom(zoom);
}
}
public void setOrientation(int orientation) {
RCTCamera.getInstance().setOrientation(orientation);
if (this._viewFinder != null) {
@ -123,6 +136,23 @@ public class RCTCameraView extends ViewGroup {
RCTCamera.getInstance().setBarCodeTypes(types);
}
public void setClearWindowBackground(boolean clearWindowBackground) {
this._clearWindowBackground = clearWindowBackground;
if (this._viewFinder != null) {
this._viewFinder.setClearWindowBackground(clearWindowBackground);
}
}
public void stopPreview() {
if (_viewFinder == null) return;
_viewFinder.stopPreview();
}
public void startPreview() {
if (_viewFinder == null) return;
_viewFinder.startPreview();
}
private boolean setActualDeviceOrientation(Context context) {
int actualDeviceOrientation = getDeviceOrientation(context);
if (_actualDeviceOrientation != actualDeviceOrientation) {
@ -180,6 +210,8 @@ public class RCTCameraView extends ViewGroup {
int viewFinderPaddingX = (int) ((width - viewfinderWidth) / 2);
int viewFinderPaddingY = (int) ((height - viewfinderHeight) / 2);
RCTCamera.getInstance().setPreviewVisibleSize(_viewFinder.getCameraType(), (int) width, (int) height);
this._viewFinder.layout(viewFinderPaddingX, viewFinderPaddingY, viewFinderPaddingX + viewfinderWidth, viewFinderPaddingY + viewfinderHeight);
this.postInvalidate(this.getLeft(), this.getTop(), this.getRight(), this.getBottom());
}

View File

@ -4,7 +4,9 @@
package com.lwansbrough.RCTCamera;
import android.app.Activity;
import android.content.Context;
import android.content.ContextWrapper;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
@ -41,6 +43,7 @@ class RCTCameraViewFinder extends TextureView implements TextureView.SurfaceText
private boolean _isStarting;
private boolean _isStopping;
private Camera _camera;
private boolean _clearWindowBackground = false;
private float mFingerSpacing;
// concurrency lock for barcode scanner to avoid flooding the runtime
@ -83,6 +86,10 @@ class RCTCameraViewFinder extends TextureView implements TextureView.SurfaceText
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
public int getCameraType() {
return _cameraType;
}
public double getRatio() {
int width = RCTCamera.getInstance().getPreviewWidth(this._cameraType);
int height = RCTCamera.getInstance().getPreviewHeight(this._cameraType);
@ -120,13 +127,21 @@ class RCTCameraViewFinder extends TextureView implements TextureView.SurfaceText
RCTCamera.getInstance().setFlashMode(_cameraType, flashMode);
}
-    private void startPreview() {
+    public void setClearWindowBackground(boolean clearWindowBackground) {
+        this._clearWindowBackground = clearWindowBackground;
+    }
+
+    public void setZoom(int zoom) {
+        RCTCamera.getInstance().setZoom(_cameraType, zoom);
+    }
+
+    public void startPreview() {
        if (_surfaceTexture != null) {
            startCamera();
        }
    }

-    private void stopPreview() {
+    public void stopPreview() {
if (_camera != null) {
stopCamera();
}
@ -176,6 +191,12 @@ class RCTCameraViewFinder extends TextureView implements TextureView.SurfaceText
_camera.setParameters(parameters);
_camera.setPreviewTexture(_surfaceTexture);
_camera.startPreview();
// clear window background if needed
if (_clearWindowBackground) {
Activity activity = getActivity();
if (activity != null)
activity.getWindow().setBackgroundDrawable(null);
}
// send previews to `onPreviewFrame`
_camera.setPreviewCallback(this);
} catch (NullPointerException e) {
@ -209,6 +230,17 @@ class RCTCameraViewFinder extends TextureView implements TextureView.SurfaceText
}
}
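    // The view's Context is usually not an Activity itself but a wrapper
    // (e.g. a themed React context), so the helper below unwraps ContextWrapper
    // layers until it reaches the hosting Activity, or returns null if there is none.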
private Activity getActivity() {
Context context = getContext();
while (context instanceof ContextWrapper) {
if (context instanceof Activity) {
return (Activity)context;
}
context = ((ContextWrapper)context).getBaseContext();
}
return null;
}
/**
* Parse barcodes as BarcodeFormat constants.
*
@ -300,39 +332,63 @@ class RCTCameraViewFinder extends TextureView implements TextureView.SurfaceText
this.imageData = imageData;
}
private Result getBarcode(int width, int height) {
try {
PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(imageData, width, height, 0, 0, width, height, false);
BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
return _multiFormatReader.decodeWithState(bitmap);
} catch (Throwable t) {
// meh
} finally {
_multiFormatReader.reset();
}
return null;
}
private Result getBarcodeAnyOrientation() {
Camera.Size size = camera.getParameters().getPreviewSize();
int width = size.width;
int height = size.height;
Result result = getBarcode(width, height);
if (result != null)
return result;
rotateImage(width, height);
width = size.height;
height = size.width;
return getBarcode(width, height);
}
private void rotateImage(int width, int height) {
byte[] rotated = new byte[imageData.length];
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
rotated[x * height + height - y - 1] = imageData[x + y * width];
}
}
imageData = rotated;
}
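        // Worked example (illustrative only): for width=3, height=2 the index
        // mapping above rotates the row-major grid
        //   1 2 3
        //   4 5 6
        // clockwise by 90 degrees into the 2-wide, 3-tall grid
        //   4 1
        //   5 2
        //   6 3
        // i.e. {1,2,3,4,5,6} becomes {4,1,5,2,6,3}; the caller then swaps
        // width and height.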
@Override
protected Void doInBackground(Void... ignored) {
if (isCancelled()) {
return null;
}
-            Camera.Size size = camera.getParameters().getPreviewSize();
-            int width = size.width;
-            int height = size.height;
-            // rotate for zxing if orientation is portrait
-            if (RCTCamera.getInstance().getActualDeviceOrientation() == 0) {
-                byte[] rotated = new byte[imageData.length];
-                for (int y = 0; y < height; y++) {
-                    for (int x = 0; x < width; x++) {
-                        rotated[x * height + height - y - 1] = imageData[x + y * width];
-                    }
-                }
-                width = size.height;
-                height = size.width;
-                imageData = rotated;
-            }
            try {
-                PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(imageData, width, height, 0, 0, width, height, false);
-                BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
-                Result result = _multiFormatReader.decodeWithState(bitmap);
+                // rotate for zxing if orientation is portrait
+                Result result = getBarcodeAnyOrientation();
+                if (result == null) {
+                    throw new Exception();
+                }
ReactContext reactContext = RCTCameraModule.getReactContextSingleton();
WritableMap event = Arguments.createMap();
WritableArray resultPoints = Arguments.createArray();
ResultPoint[] points = result.getResultPoints();
if (points != null) {
for (ResultPoint point : points) {
WritableMap newPoint = Arguments.createMap();
@ -359,6 +415,11 @@ class RCTCameraViewFinder extends TextureView implements TextureView.SurfaceText
@Override
public boolean onTouchEvent(MotionEvent event) {
// Fast swiping and touching while component is being loaded can cause _camera to be null.
if (_camera == null) {
return false;
}
// Get the pointer ID
Camera.Parameters params = _camera.getParameters();
int action = event.getAction();

View File

@ -2,16 +2,22 @@ package com.lwansbrough.RCTCamera;
import android.support.annotation.Nullable;
import com.facebook.infer.annotation.Assertions;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.common.MapBuilder;
import com.facebook.react.uimanager.*;
import com.facebook.react.uimanager.annotations.ReactProp;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
public class RCTCameraViewManager extends ViewGroupManager<RCTCameraView> {
private static final String REACT_CLASS = "RCTCamera";
public static final int COMMAND_STOP_PREVIEW = 1;
public static final int COMMAND_START_PREVIEW = 2;
@Override
public String getName() {
return REACT_CLASS;
@ -22,6 +28,33 @@ public class RCTCameraViewManager extends ViewGroupManager<RCTCameraView> {
return new RCTCameraView(context);
}
@Override
public Map<String, Integer> getCommandsMap() {
return MapBuilder.of(
"stopPreview",
COMMAND_STOP_PREVIEW,
"startPreview",
COMMAND_START_PREVIEW);
}
@Override
public void receiveCommand(RCTCameraView view, int commandType, @Nullable ReadableArray args) {
Assertions.assertNotNull(view);
switch (commandType) {
case COMMAND_STOP_PREVIEW: {
view.stopPreview();
return;
}
case COMMAND_START_PREVIEW: {
view.startPreview();
return;
}
default:
throw new IllegalArgumentException(
String.format("Unsupported command %d received by %s.", commandType, getClass().getSimpleName()));
}
}
@ReactProp(name = "aspect")
public void setAspect(RCTCameraView view, int aspect) {
view.setAspect(aspect);
@ -60,6 +93,11 @@ public class RCTCameraViewManager extends ViewGroupManager<RCTCameraView> {
view.setFlashMode(flashMode);
}
@ReactProp(name = "zoom")
public void setZoom(RCTCameraView view, int zoom) {
view.setZoom(zoom);
}
@ReactProp(name = "orientation")
public void setOrientation(RCTCameraView view, int orientation) {
view.setOrientation(orientation);
@ -86,4 +124,9 @@ public class RCTCameraViewManager extends ViewGroupManager<RCTCameraView> {
}
view.setBarCodeTypes(result);
}
@ReactProp(name = "clearWindowBackground")
public void setClearWindowBackground(RCTCameraView view, boolean clearWindowBackground) {
view.setClearWindowBackground(clearWindowBackground);
}
}

View File

@ -0,0 +1,287 @@
package org.reactnative.camera;
import android.graphics.Bitmap;
import android.os.Build;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.uimanager.NativeViewHierarchyManager;
import com.facebook.react.uimanager.UIBlock;
import com.facebook.react.uimanager.UIManagerModule;
import com.google.android.cameraview.AspectRatio;
import com.google.zxing.BarcodeFormat;
import org.reactnative.camera.tasks.ResolveTakenPictureAsyncTask;
import org.reactnative.camera.utils.ScopedContext;
import org.reactnative.facedetector.RNFaceDetector;
import java.io.File;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
public class CameraModule extends ReactContextBaseJavaModule {
private static final String TAG = "CameraModule";
private ScopedContext mScopedContext;
static final int VIDEO_2160P = 0;
static final int VIDEO_1080P = 1;
static final int VIDEO_720P = 2;
static final int VIDEO_480P = 3;
static final int VIDEO_4x3 = 4;
public static final Map<String, Object> VALID_BARCODE_TYPES =
Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("aztec", BarcodeFormat.AZTEC.toString());
put("ean13", BarcodeFormat.EAN_13.toString());
put("ean8", BarcodeFormat.EAN_8.toString());
put("qr", BarcodeFormat.QR_CODE.toString());
put("pdf417", BarcodeFormat.PDF_417.toString());
put("upc_e", BarcodeFormat.UPC_E.toString());
put("datamatrix", BarcodeFormat.DATA_MATRIX.toString());
put("code39", BarcodeFormat.CODE_39.toString());
put("code93", BarcodeFormat.CODE_93.toString());
put("interleaved2of5", BarcodeFormat.ITF.toString());
put("codabar", BarcodeFormat.CODABAR.toString());
put("code128", BarcodeFormat.CODE_128.toString());
put("maxicode", BarcodeFormat.MAXICODE.toString());
put("rss14", BarcodeFormat.RSS_14.toString());
put("rssexpanded", BarcodeFormat.RSS_EXPANDED.toString());
put("upc_a", BarcodeFormat.UPC_A.toString());
put("upc_ean", BarcodeFormat.UPC_EAN_EXTENSION.toString());
}
});
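  // Illustrative lookup (hypothetical helper, not part of the module): a JS-side
  // name such as "qr" maps to the ZXing enum name "QR_CODE", which can then be
  // resolved with BarcodeFormat.valueOf; unknown names yield null.
  static BarcodeFormat resolveBarcodeFormat(String jsName) {
    String formatString = (String) VALID_BARCODE_TYPES.get(jsName);
    return formatString == null ? null : BarcodeFormat.valueOf(formatString);
  }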
public CameraModule(ReactApplicationContext reactContext) {
super(reactContext);
mScopedContext = new ScopedContext(reactContext);
}
public ScopedContext getScopedContext() {
return mScopedContext;
}
@Override
public String getName() {
return "RNCameraModule";
}
@Nullable
@Override
public Map<String, Object> getConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("Type", getTypeConstants());
put("FlashMode", getFlashModeConstants());
put("AutoFocus", getAutoFocusConstants());
put("WhiteBalance", getWhiteBalanceConstants());
put("VideoQuality", getVideoQualityConstants());
put("BarCodeType", getBarCodeConstants());
put("FaceDetection", Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("Mode", getFaceDetectionModeConstants());
put("Landmarks", getFaceDetectionLandmarksConstants());
put("Classifications", getFaceDetectionClassificationsConstants());
}
private Map<String, Object> getFaceDetectionModeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("fast", RNFaceDetector.FAST_MODE);
put("accurate", RNFaceDetector.ACCURATE_MODE);
}
});
}
private Map<String, Object> getFaceDetectionClassificationsConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("all", RNFaceDetector.ALL_CLASSIFICATIONS);
put("none", RNFaceDetector.NO_CLASSIFICATIONS);
}
});
}
private Map<String, Object> getFaceDetectionLandmarksConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("all", RNFaceDetector.ALL_LANDMARKS);
put("none", RNFaceDetector.NO_LANDMARKS);
}
});
}
}));
}
private Map<String, Object> getTypeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("front", Constants.FACING_FRONT);
put("back", Constants.FACING_BACK);
}
});
}
private Map<String, Object> getFlashModeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("off", Constants.FLASH_OFF);
put("on", Constants.FLASH_ON);
put("auto", Constants.FLASH_AUTO);
put("torch", Constants.FLASH_TORCH);
}
});
}
private Map<String, Object> getAutoFocusConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("on", true);
put("off", false);
}
});
}
private Map<String, Object> getWhiteBalanceConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("auto", Constants.WB_AUTO);
put("cloudy", Constants.WB_CLOUDY);
put("sunny", Constants.WB_SUNNY);
put("shadow", Constants.WB_SHADOW);
put("fluorescent", Constants.WB_FLUORESCENT);
put("incandescent", Constants.WB_INCANDESCENT);
}
});
}
private Map<String, Object> getVideoQualityConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("2160p", VIDEO_2160P);
put("1080p", VIDEO_1080P);
put("720p", VIDEO_720P);
put("480p", VIDEO_480P);
put("4:3", VIDEO_4x3);
}
});
}
private Map<String, Object> getBarCodeConstants() {
return VALID_BARCODE_TYPES;
}
});
}
@ReactMethod
public void takePicture(final ReadableMap options, final int viewTag, final Promise promise) {
final ReactApplicationContext context = getReactApplicationContext();
final File cacheDirectory = mScopedContext.getCacheDirectory();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
RNCameraView cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
try {
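          // Build.FINGERPRINT containing "generic" is a common heuristic for
          // detecting emulators; real devices take an actual picture, while
          // emulators fall through to the generated placeholder photo below.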
if (!Build.FINGERPRINT.contains("generic")) {
if (cameraView.isCameraOpened()) {
cameraView.takePicture(options, promise, cacheDirectory);
} else {
promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
}
} else {
Bitmap image = RNCameraViewHelper.generateSimulatorPhoto(cameraView.getWidth(), cameraView.getHeight());
ByteBuffer byteBuffer = ByteBuffer.allocate(image.getRowBytes() * image.getHeight());
image.copyPixelsToBuffer(byteBuffer);
new ResolveTakenPictureAsyncTask(byteBuffer.array(), promise, options).execute();
}
} catch (Exception e) {
promise.reject("E_CAMERA_BAD_VIEWTAG", "takePictureAsync: Expected a Camera component");
}
}
});
}
@ReactMethod
public void record(final ReadableMap options, final int viewTag, final Promise promise) {
final ReactApplicationContext context = getReactApplicationContext();
final File cacheDirectory = mScopedContext.getCacheDirectory();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
final RNCameraView cameraView;
try {
cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
if (cameraView.isCameraOpened()) {
cameraView.record(options, promise, cacheDirectory);
} else {
promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
}
} catch (Exception e) {
promise.reject("E_CAMERA_BAD_VIEWTAG", "recordAsync: Expected a Camera component");
}
}
});
}
@ReactMethod
public void stopRecording(final int viewTag) {
final ReactApplicationContext context = getReactApplicationContext();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
final RNCameraView cameraView;
try {
cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
if (cameraView.isCameraOpened()) {
cameraView.stopRecording();
}
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
@ReactMethod
public void getSupportedRatios(final int viewTag, final Promise promise) {
final ReactApplicationContext context = getReactApplicationContext();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
final RNCameraView cameraView;
try {
cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
WritableArray result = Arguments.createArray();
if (cameraView.isCameraOpened()) {
Set<AspectRatio> ratios = cameraView.getSupportedAspectRatios();
for (AspectRatio ratio : ratios) {
result.pushString(ratio.toString());
}
promise.resolve(result);
} else {
promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
}
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
}

View File

@ -0,0 +1,136 @@
package org.reactnative.camera;
import android.support.annotation.Nullable;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.common.MapBuilder;
import com.facebook.react.uimanager.ThemedReactContext;
import com.facebook.react.uimanager.ViewGroupManager;
import com.facebook.react.uimanager.annotations.ReactProp;
import com.google.android.cameraview.AspectRatio;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
public class CameraViewManager extends ViewGroupManager<RNCameraView> {
public enum Events {
EVENT_CAMERA_READY("onCameraReady"),
EVENT_ON_MOUNT_ERROR("onMountError"),
EVENT_ON_BAR_CODE_READ("onBarCodeRead"),
EVENT_ON_FACES_DETECTED("onFacesDetected"),
EVENT_ON_FACE_DETECTION_ERROR("onFaceDetectionError");
private final String mName;
Events(final String name) {
mName = name;
}
@Override
public String toString() {
return mName;
}
}
private static final String REACT_CLASS = "RNCamera";
@Override
public void onDropViewInstance(RNCameraView view) {
view.stop();
super.onDropViewInstance(view);
}
@Override
public String getName() {
return REACT_CLASS;
}
@Override
protected RNCameraView createViewInstance(ThemedReactContext themedReactContext) {
return new RNCameraView(themedReactContext);
}
@Override
@Nullable
public Map<String, Object> getExportedCustomDirectEventTypeConstants() {
MapBuilder.Builder<String, Object> builder = MapBuilder.builder();
for (Events event : Events.values()) {
builder.put(event.toString(), MapBuilder.of("registrationName", event.toString()));
}
return builder.build();
}
@ReactProp(name = "type")
public void setType(RNCameraView view, int type) {
view.setFacing(type);
}
@ReactProp(name = "ratio")
public void setRatio(RNCameraView view, String ratio) {
view.setAspectRatio(AspectRatio.parse(ratio));
}
@ReactProp(name = "flashMode")
public void setFlashMode(RNCameraView view, int torchMode) {
view.setFlash(torchMode);
}
@ReactProp(name = "autoFocus")
public void setAutoFocus(RNCameraView view, boolean autoFocus) {
view.setAutoFocus(autoFocus);
}
@ReactProp(name = "focusDepth")
public void setFocusDepth(RNCameraView view, float depth) {
view.setFocusDepth(depth);
}
@ReactProp(name = "zoom")
public void setZoom(RNCameraView view, float zoom) {
view.setZoom(zoom);
}
@ReactProp(name = "whiteBalance")
public void setWhiteBalance(RNCameraView view, int whiteBalance) {
view.setWhiteBalance(whiteBalance);
}
@ReactProp(name = "barCodeTypes")
public void setBarCodeTypes(RNCameraView view, ReadableArray barCodeTypes) {
if (barCodeTypes == null) {
return;
}
List<String> result = new ArrayList<>(barCodeTypes.size());
for (int i = 0; i < barCodeTypes.size(); i++) {
result.add(barCodeTypes.getString(i));
}
view.setBarCodeTypes(result);
}
@ReactProp(name = "barCodeScannerEnabled")
public void setBarCodeScanning(RNCameraView view, boolean barCodeScannerEnabled) {
view.setShouldScanBarCodes(barCodeScannerEnabled);
}
@ReactProp(name = "faceDetectorEnabled")
public void setFaceDetecting(RNCameraView view, boolean faceDetectorEnabled) {
view.setShouldDetectFaces(faceDetectorEnabled);
}
@ReactProp(name = "faceDetectionMode")
public void setFaceDetectionMode(RNCameraView view, int mode) {
view.setFaceDetectionMode(mode);
}
@ReactProp(name = "faceDetectionLandmarks")
public void setFaceDetectionLandmarks(RNCameraView view, int landmarks) {
view.setFaceDetectionLandmarks(landmarks);
}
@ReactProp(name = "faceDetectionClassifications")
public void setFaceDetectionClassifications(RNCameraView view, int classifications) {
view.setFaceDetectionClassifications(classifications);
}
}

View File

@ -0,0 +1,42 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.reactnative.camera;
import com.google.android.cameraview.AspectRatio;
public interface Constants {
AspectRatio DEFAULT_ASPECT_RATIO = AspectRatio.of(4, 3);
int FACING_BACK = 0;
int FACING_FRONT = 1;
int FLASH_OFF = 0;
int FLASH_ON = 1;
int FLASH_TORCH = 2;
int FLASH_AUTO = 3;
int FLASH_RED_EYE = 4;
int LANDSCAPE_90 = 90;
int LANDSCAPE_270 = 270;
int WB_AUTO = 0;
int WB_CLOUDY = 1;
int WB_SUNNY = 2;
int WB_SHADOW = 3;
int WB_FLUORESCENT = 4;
int WB_INCANDESCENT = 5;
}

View File

@ -0,0 +1,45 @@
package org.reactnative.camera;
import com.facebook.react.ReactPackage;
import com.facebook.react.bridge.JavaScriptModule;
import com.facebook.react.bridge.NativeModule;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.uimanager.ViewManager;
import com.lwansbrough.RCTCamera.RCTCameraModule;
import com.lwansbrough.RCTCamera.RCTCameraViewManager;
import org.reactnative.camera.CameraModule;
import org.reactnative.camera.CameraViewManager;
import org.reactnative.facedetector.FaceDetectorModule;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* Created by jgfidelis on 02/02/18.
*/
public class RNCameraPackage implements ReactPackage {
@Override
public List<NativeModule> createNativeModules(ReactApplicationContext reactApplicationContext) {
return Arrays.<NativeModule>asList(
new RCTCameraModule(reactApplicationContext),
new CameraModule(reactApplicationContext),
new FaceDetectorModule(reactApplicationContext)
);
}
// Deprecated in RN 0.47
public List<Class<? extends JavaScriptModule>> createJSModules() {
return Collections.emptyList();
}
@Override
public List<ViewManager> createViewManagers(ReactApplicationContext reactApplicationContext) {
return Arrays.<ViewManager>asList(
new RCTCameraViewManager(),
new CameraViewManager()
);
}
}
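// Typical host-app registration (illustrative; MainReactPackage and the
// surrounding MainApplication belong to the app, not to this package):
//
//   @Override
//   protected List<ReactPackage> getPackages() {
//     return Arrays.<ReactPackage>asList(
//         new MainReactPackage(),
//         new RNCameraPackage()
//     );
//   }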

View File

@ -0,0 +1,329 @@
package org.reactnative.camera;
import android.Manifest;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.media.CamcorderProfile;
import android.os.Build;
import android.support.v4.content.ContextCompat;
import android.util.SparseArray;
import android.view.View;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.LifecycleEventListener;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.ThemedReactContext;
import com.google.android.cameraview.CameraView;
import com.google.android.gms.vision.face.Face;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.DecodeHintType;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.Result;
import org.reactnative.camera.tasks.BarCodeScannerAsyncTask;
import org.reactnative.camera.tasks.BarCodeScannerAsyncTaskDelegate;
import org.reactnative.camera.tasks.FaceDetectorAsyncTask;
import org.reactnative.camera.tasks.FaceDetectorAsyncTaskDelegate;
import org.reactnative.camera.tasks.ResolveTakenPictureAsyncTask;
import org.reactnative.camera.utils.ImageDimensions;
import org.reactnative.camera.utils.RNFileUtils;
import org.reactnative.facedetector.RNFaceDetector;
import java.io.File;
import java.io.IOException;
import java.util.EnumMap;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
public class RNCameraView extends CameraView implements LifecycleEventListener, BarCodeScannerAsyncTaskDelegate, FaceDetectorAsyncTaskDelegate {
private ThemedReactContext mThemedReactContext;
private Queue<Promise> mPictureTakenPromises = new ConcurrentLinkedQueue<>();
private Map<Promise, ReadableMap> mPictureTakenOptions = new ConcurrentHashMap<>();
private Map<Promise, File> mPictureTakenDirectories = new ConcurrentHashMap<>();
private Promise mVideoRecordedPromise;
private List<String> mBarCodeTypes = null;
private boolean mIsPaused = false;
private boolean mIsNew = true;
// Concurrency lock for scanners to avoid flooding the runtime
public volatile boolean barCodeScannerTaskLock = false;
public volatile boolean faceDetectorTaskLock = false;
// Scanning-related properties
private final MultiFormatReader mMultiFormatReader = new MultiFormatReader();
private final RNFaceDetector mFaceDetector;
private boolean mShouldDetectFaces = false;
private boolean mShouldScanBarCodes = false;
private int mFaceDetectorMode = RNFaceDetector.FAST_MODE;
private int mFaceDetectionLandmarks = RNFaceDetector.NO_LANDMARKS;
private int mFaceDetectionClassifications = RNFaceDetector.NO_CLASSIFICATIONS;
public RNCameraView(ThemedReactContext themedReactContext) {
super(themedReactContext);
initBarcodeReader();
mThemedReactContext = themedReactContext;
mFaceDetector = new RNFaceDetector(themedReactContext);
setupFaceDetector();
themedReactContext.addLifecycleEventListener(this);
addCallback(new Callback() {
@Override
public void onCameraOpened(CameraView cameraView) {
RNCameraViewHelper.emitCameraReadyEvent(cameraView);
}
@Override
public void onMountError(CameraView cameraView) {
RNCameraViewHelper.emitMountErrorEvent(cameraView);
}
@Override
public void onPictureTaken(CameraView cameraView, final byte[] data) {
Promise promise = mPictureTakenPromises.poll();
ReadableMap options = mPictureTakenOptions.remove(promise);
final File cacheDirectory = mPictureTakenDirectories.remove(promise);
new ResolveTakenPictureAsyncTask(data, promise, options, cacheDirectory).execute();
}
@Override
public void onVideoRecorded(CameraView cameraView, String path) {
if (mVideoRecordedPromise != null) {
if (path != null) {
WritableMap result = Arguments.createMap();
result.putString("uri", RNFileUtils.uriFromFile(new File(path)).toString());
mVideoRecordedPromise.resolve(result);
} else {
mVideoRecordedPromise.reject("E_RECORDING", "Couldn't stop recording - there is none in progress");
}
mVideoRecordedPromise = null;
}
}
@Override
public void onFramePreview(CameraView cameraView, byte[] data, int width, int height, int rotation) {
int correctRotation = RNCameraViewHelper.getCorrectCameraRotation(rotation, getFacing());
if (mShouldScanBarCodes && !barCodeScannerTaskLock && cameraView instanceof BarCodeScannerAsyncTaskDelegate) {
barCodeScannerTaskLock = true;
BarCodeScannerAsyncTaskDelegate delegate = (BarCodeScannerAsyncTaskDelegate) cameraView;
new BarCodeScannerAsyncTask(delegate, mMultiFormatReader, data, width, height).execute();
}
if (mShouldDetectFaces && !faceDetectorTaskLock && cameraView instanceof FaceDetectorAsyncTaskDelegate) {
faceDetectorTaskLock = true;
FaceDetectorAsyncTaskDelegate delegate = (FaceDetectorAsyncTaskDelegate) cameraView;
new FaceDetectorAsyncTask(delegate, mFaceDetector, data, width, height, correctRotation).execute();
}
}
});
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
View preview = getView();
if (null == preview) {
return;
}
this.setBackgroundColor(Color.BLACK);
int width = right - left;
int height = bottom - top;
preview.layout(0, 0, width, height);
}
@Override
public void requestLayout() {
// React handles this for us, so we don't need to call super.requestLayout();
}
@Override
public void onViewAdded(View child) {
if (this.getView() == child || this.getView() == null) return;
    // remove and re-add the view to make sure it stays in the back.
    // @TODO figure out why there was a z-order issue in the first place and fix accordingly.
this.removeView(this.getView());
this.addView(this.getView(), 0);
}
public void setBarCodeTypes(List<String> barCodeTypes) {
mBarCodeTypes = barCodeTypes;
initBarcodeReader();
}
public void takePicture(ReadableMap options, final Promise promise, File cacheDirectory) {
mPictureTakenPromises.add(promise);
mPictureTakenOptions.put(promise, options);
mPictureTakenDirectories.put(promise, cacheDirectory);
super.takePicture();
}
public void record(ReadableMap options, final Promise promise, File cacheDirectory) {
try {
String path = RNFileUtils.getOutputFilePath(cacheDirectory, ".mp4");
int maxDuration = options.hasKey("maxDuration") ? options.getInt("maxDuration") : -1;
int maxFileSize = options.hasKey("maxFileSize") ? options.getInt("maxFileSize") : -1;
CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
if (options.hasKey("quality")) {
profile = RNCameraViewHelper.getCamcorderProfile(options.getInt("quality"));
}
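      // Note: the mere presence of a "mute" key disables audio, regardless of its
      // value; maxDuration is interpreted in seconds and converted to milliseconds
      // when passed to record() below.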
boolean recordAudio = !options.hasKey("mute");
if (super.record(path, maxDuration * 1000, maxFileSize, recordAudio, profile)) {
mVideoRecordedPromise = promise;
} else {
promise.reject("E_RECORDING_FAILED", "Starting video recording failed. Another recording might be in progress.");
}
} catch (IOException e) {
promise.reject("E_RECORDING_FAILED", "Starting video recording failed - could not create video file.");
}
}
/**
* Initialize the barcode decoder.
* Supports all iOS codes except [code138, code39mod43, itf14]
* Additionally supports [codabar, code128, maxicode, rss14, rssexpanded, upc_a, upc_ean]
*/
private void initBarcodeReader() {
EnumMap<DecodeHintType, Object> hints = new EnumMap<>(DecodeHintType.class);
EnumSet<BarcodeFormat> decodeFormats = EnumSet.noneOf(BarcodeFormat.class);
if (mBarCodeTypes != null) {
for (String code : mBarCodeTypes) {
String formatString = (String) CameraModule.VALID_BARCODE_TYPES.get(code);
if (formatString != null) {
decodeFormats.add(BarcodeFormat.valueOf(formatString)); // formatString is the ZXing enum name, e.g. "QR_CODE"
}
}
}
hints.put(DecodeHintType.POSSIBLE_FORMATS, decodeFormats);
mMultiFormatReader.setHints(hints);
}
public void setShouldScanBarCodes(boolean shouldScanBarCodes) {
this.mShouldScanBarCodes = shouldScanBarCodes;
setScanning(mShouldDetectFaces || mShouldScanBarCodes);
}
public void onBarCodeRead(Result barCode) {
String barCodeType = barCode.getBarcodeFormat().toString();
if (!mShouldScanBarCodes || !mBarCodeTypes.contains(barCodeType)) {
return;
}
RNCameraViewHelper.emitBarCodeReadEvent(this, barCode);
}
public void onBarCodeScanningTaskCompleted() {
barCodeScannerTaskLock = false;
mMultiFormatReader.reset();
}
/**
* Initial setup of the face detector
*/
private void setupFaceDetector() {
mFaceDetector.setMode(mFaceDetectorMode);
mFaceDetector.setLandmarkType(mFaceDetectionLandmarks);
mFaceDetector.setClassificationType(mFaceDetectionClassifications);
mFaceDetector.setTracking(true);
}
public void setFaceDetectionLandmarks(int landmarks) {
mFaceDetectionLandmarks = landmarks;
if (mFaceDetector != null) {
mFaceDetector.setLandmarkType(landmarks);
}
}
public void setFaceDetectionClassifications(int classifications) {
mFaceDetectionClassifications = classifications;
if (mFaceDetector != null) {
mFaceDetector.setClassificationType(classifications);
}
}
public void setFaceDetectionMode(int mode) {
mFaceDetectorMode = mode;
if (mFaceDetector != null) {
mFaceDetector.setMode(mode);
}
}
public void setShouldDetectFaces(boolean shouldDetectFaces) {
this.mShouldDetectFaces = shouldDetectFaces;
setScanning(mShouldDetectFaces || mShouldScanBarCodes);
}
public void onFacesDetected(SparseArray<Face> facesReported, int sourceWidth, int sourceHeight, int sourceRotation) {
if (!mShouldDetectFaces) {
return;
}
SparseArray<Face> facesDetected = facesReported == null ? new SparseArray<Face>() : facesReported;
ImageDimensions dimensions = new ImageDimensions(sourceWidth, sourceHeight, sourceRotation, getFacing());
RNCameraViewHelper.emitFacesDetectedEvent(this, facesDetected, dimensions);
}
public void onFaceDetectionError(RNFaceDetector faceDetector) {
if (!mShouldDetectFaces) {
return;
}
RNCameraViewHelper.emitFaceDetectionErrorEvent(this, faceDetector);
}
@Override
public void onFaceDetectingTaskCompleted() {
faceDetectorTaskLock = false;
}
@Override
public void onHostResume() {
if (hasCameraPermissions()) {
if ((mIsPaused && !isCameraOpened()) || mIsNew) {
mIsPaused = false;
mIsNew = false;
if (!Build.FINGERPRINT.contains("generic")) {
start();
}
}
    } else {
      // Camera permissions not granted - component could not be rendered.
      RNCameraViewHelper.emitMountErrorEvent(this);
    }
}
@Override
public void onHostPause() {
if (!mIsPaused && isCameraOpened()) {
mIsPaused = true;
stop();
}
}
@Override
public void onHostDestroy() {
mFaceDetector.release();
stop();
}
private boolean hasCameraPermissions() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
int result = ContextCompat.checkSelfPermission(getContext(), Manifest.permission.CAMERA);
return result == PackageManager.PERMISSION_GRANTED;
} else {
return true;
}
}
}
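// Requesting the permission itself is left to the host app; a minimal sketch
// using the support library (hypothetical request-code constant, not part of
// this class):
//
//   ActivityCompat.requestPermissions(activity,
//       new String[]{ Manifest.permission.CAMERA },
//       CAMERA_PERMISSION_REQUEST /* app-defined constant */);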

View File

@ -0,0 +1,299 @@
package org.reactnative.camera;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.media.CamcorderProfile;
import android.os.Build;
import android.support.media.ExifInterface;
import android.util.SparseArray;
import android.view.ViewGroup;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.UIManagerModule;
import com.google.android.cameraview.CameraView;
import com.google.android.gms.vision.face.Face;
import com.google.zxing.Result;
import org.reactnative.camera.events.BarCodeReadEvent;
import org.reactnative.camera.events.CameraMountErrorEvent;
import org.reactnative.camera.events.CameraReadyEvent;
import org.reactnative.camera.events.FaceDetectionErrorEvent;
import org.reactnative.camera.events.FacesDetectedEvent;
import org.reactnative.camera.utils.ImageDimensions;
import org.reactnative.facedetector.RNFaceDetector;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Locale;
public class RNCameraViewHelper {
public static final String[][] exifTags = new String[][]{
{"string", ExifInterface.TAG_ARTIST},
{"int", ExifInterface.TAG_BITS_PER_SAMPLE},
{"int", ExifInterface.TAG_COMPRESSION},
{"string", ExifInterface.TAG_COPYRIGHT},
{"string", ExifInterface.TAG_DATETIME},
{"string", ExifInterface.TAG_IMAGE_DESCRIPTION},
{"int", ExifInterface.TAG_IMAGE_LENGTH},
{"int", ExifInterface.TAG_IMAGE_WIDTH},
{"int", ExifInterface.TAG_JPEG_INTERCHANGE_FORMAT},
{"int", ExifInterface.TAG_JPEG_INTERCHANGE_FORMAT_LENGTH},
{"string", ExifInterface.TAG_MAKE},
{"string", ExifInterface.TAG_MODEL},
{"int", ExifInterface.TAG_ORIENTATION},
{"int", ExifInterface.TAG_PHOTOMETRIC_INTERPRETATION},
{"int", ExifInterface.TAG_PLANAR_CONFIGURATION},
{"double", ExifInterface.TAG_PRIMARY_CHROMATICITIES},
{"double", ExifInterface.TAG_REFERENCE_BLACK_WHITE},
{"int", ExifInterface.TAG_RESOLUTION_UNIT},
{"int", ExifInterface.TAG_ROWS_PER_STRIP},
{"int", ExifInterface.TAG_SAMPLES_PER_PIXEL},
{"string", ExifInterface.TAG_SOFTWARE},
{"int", ExifInterface.TAG_STRIP_BYTE_COUNTS},
{"int", ExifInterface.TAG_STRIP_OFFSETS},
{"int", ExifInterface.TAG_TRANSFER_FUNCTION},
{"double", ExifInterface.TAG_WHITE_POINT},
{"double", ExifInterface.TAG_X_RESOLUTION},
{"double", ExifInterface.TAG_Y_CB_CR_COEFFICIENTS},
{"int", ExifInterface.TAG_Y_CB_CR_POSITIONING},
{"int", ExifInterface.TAG_Y_CB_CR_SUB_SAMPLING},
{"double", ExifInterface.TAG_Y_RESOLUTION},
{"double", ExifInterface.TAG_APERTURE_VALUE},
{"double", ExifInterface.TAG_BRIGHTNESS_VALUE},
{"string", ExifInterface.TAG_CFA_PATTERN},
{"int", ExifInterface.TAG_COLOR_SPACE},
{"string", ExifInterface.TAG_COMPONENTS_CONFIGURATION},
{"double", ExifInterface.TAG_COMPRESSED_BITS_PER_PIXEL},
{"int", ExifInterface.TAG_CONTRAST},
{"int", ExifInterface.TAG_CUSTOM_RENDERED},
{"string", ExifInterface.TAG_DATETIME_DIGITIZED},
{"string", ExifInterface.TAG_DATETIME_ORIGINAL},
{"string", ExifInterface.TAG_DEVICE_SETTING_DESCRIPTION},
{"double", ExifInterface.TAG_DIGITAL_ZOOM_RATIO},
{"string", ExifInterface.TAG_EXIF_VERSION},
{"double", ExifInterface.TAG_EXPOSURE_BIAS_VALUE},
{"double", ExifInterface.TAG_EXPOSURE_INDEX},
{"int", ExifInterface.TAG_EXPOSURE_MODE},
{"int", ExifInterface.TAG_EXPOSURE_PROGRAM},
{"double", ExifInterface.TAG_EXPOSURE_TIME},
{"double", ExifInterface.TAG_F_NUMBER},
{"string", ExifInterface.TAG_FILE_SOURCE},
{"int", ExifInterface.TAG_FLASH},
{"double", ExifInterface.TAG_FLASH_ENERGY},
{"string", ExifInterface.TAG_FLASHPIX_VERSION},
{"double", ExifInterface.TAG_FOCAL_LENGTH},
{"int", ExifInterface.TAG_FOCAL_LENGTH_IN_35MM_FILM},
{"int", ExifInterface.TAG_FOCAL_PLANE_RESOLUTION_UNIT},
{"double", ExifInterface.TAG_FOCAL_PLANE_X_RESOLUTION},
{"double", ExifInterface.TAG_FOCAL_PLANE_Y_RESOLUTION},
{"int", ExifInterface.TAG_GAIN_CONTROL},
{"int", ExifInterface.TAG_ISO_SPEED_RATINGS},
{"string", ExifInterface.TAG_IMAGE_UNIQUE_ID},
{"int", ExifInterface.TAG_LIGHT_SOURCE},
{"string", ExifInterface.TAG_MAKER_NOTE},
{"double", ExifInterface.TAG_MAX_APERTURE_VALUE},
{"int", ExifInterface.TAG_METERING_MODE},
{"int", ExifInterface.TAG_NEW_SUBFILE_TYPE},
{"string", ExifInterface.TAG_OECF},
{"int", ExifInterface.TAG_PIXEL_X_DIMENSION},
{"int", ExifInterface.TAG_PIXEL_Y_DIMENSION},
{"string", ExifInterface.TAG_RELATED_SOUND_FILE},
{"int", ExifInterface.TAG_SATURATION},
{"int", ExifInterface.TAG_SCENE_CAPTURE_TYPE},
{"string", ExifInterface.TAG_SCENE_TYPE},
{"int", ExifInterface.TAG_SENSING_METHOD},
{"int", ExifInterface.TAG_SHARPNESS},
{"double", ExifInterface.TAG_SHUTTER_SPEED_VALUE},
{"string", ExifInterface.TAG_SPATIAL_FREQUENCY_RESPONSE},
{"string", ExifInterface.TAG_SPECTRAL_SENSITIVITY},
{"int", ExifInterface.TAG_SUBFILE_TYPE},
{"string", ExifInterface.TAG_SUBSEC_TIME},
{"string", ExifInterface.TAG_SUBSEC_TIME_DIGITIZED},
{"string", ExifInterface.TAG_SUBSEC_TIME_ORIGINAL},
{"int", ExifInterface.TAG_SUBJECT_AREA},
{"double", ExifInterface.TAG_SUBJECT_DISTANCE},
{"int", ExifInterface.TAG_SUBJECT_DISTANCE_RANGE},
{"int", ExifInterface.TAG_SUBJECT_LOCATION},
{"string", ExifInterface.TAG_USER_COMMENT},
{"int", ExifInterface.TAG_WHITE_BALANCE},
{"int", ExifInterface.TAG_GPS_ALTITUDE_REF},
{"string", ExifInterface.TAG_GPS_AREA_INFORMATION},
{"double", ExifInterface.TAG_GPS_DOP},
{"string", ExifInterface.TAG_GPS_DATESTAMP},
{"double", ExifInterface.TAG_GPS_DEST_BEARING},
{"string", ExifInterface.TAG_GPS_DEST_BEARING_REF},
{"double", ExifInterface.TAG_GPS_DEST_DISTANCE},
{"string", ExifInterface.TAG_GPS_DEST_DISTANCE_REF},
{"double", ExifInterface.TAG_GPS_DEST_LATITUDE},
{"string", ExifInterface.TAG_GPS_DEST_LATITUDE_REF},
{"double", ExifInterface.TAG_GPS_DEST_LONGITUDE},
{"string", ExifInterface.TAG_GPS_DEST_LONGITUDE_REF},
{"int", ExifInterface.TAG_GPS_DIFFERENTIAL},
{"double", ExifInterface.TAG_GPS_IMG_DIRECTION},
{"string", ExifInterface.TAG_GPS_IMG_DIRECTION_REF},
{"string", ExifInterface.TAG_GPS_LATITUDE_REF},
{"string", ExifInterface.TAG_GPS_LONGITUDE_REF},
{"string", ExifInterface.TAG_GPS_MAP_DATUM},
{"string", ExifInterface.TAG_GPS_MEASURE_MODE},
{"string", ExifInterface.TAG_GPS_PROCESSING_METHOD},
{"string", ExifInterface.TAG_GPS_SATELLITES},
{"double", ExifInterface.TAG_GPS_SPEED},
{"string", ExifInterface.TAG_GPS_SPEED_REF},
{"string", ExifInterface.TAG_GPS_STATUS},
{"string", ExifInterface.TAG_GPS_TIMESTAMP},
{"double", ExifInterface.TAG_GPS_TRACK},
{"string", ExifInterface.TAG_GPS_TRACK_REF},
{"string", ExifInterface.TAG_GPS_VERSION_ID},
{"string", ExifInterface.TAG_INTEROPERABILITY_INDEX},
{"int", ExifInterface.TAG_THUMBNAIL_IMAGE_LENGTH},
{"int", ExifInterface.TAG_THUMBNAIL_IMAGE_WIDTH},
{"int", ExifInterface.TAG_DNG_VERSION},
{"int", ExifInterface.TAG_DEFAULT_CROP_SIZE},
{"int", ExifInterface.TAG_ORF_PREVIEW_IMAGE_START},
{"int", ExifInterface.TAG_ORF_PREVIEW_IMAGE_LENGTH},
{"int", ExifInterface.TAG_ORF_ASPECT_FRAME},
{"int", ExifInterface.TAG_RW2_SENSOR_BOTTOM_BORDER},
{"int", ExifInterface.TAG_RW2_SENSOR_LEFT_BORDER},
{"int", ExifInterface.TAG_RW2_SENSOR_RIGHT_BORDER},
{"int", ExifInterface.TAG_RW2_SENSOR_TOP_BORDER},
{"int", ExifInterface.TAG_RW2_ISO},
};
// Mount error event
public static void emitMountErrorEvent(ViewGroup view) {
CameraMountErrorEvent event = CameraMountErrorEvent.obtain(view.getId());
ReactContext reactContext = (ReactContext) view.getContext();
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
// Camera ready event
public static void emitCameraReadyEvent(ViewGroup view) {
CameraReadyEvent event = CameraReadyEvent.obtain(view.getId());
ReactContext reactContext = (ReactContext) view.getContext();
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
// Face detection events
public static void emitFacesDetectedEvent(
ViewGroup view,
SparseArray<Face> faces,
ImageDimensions dimensions
) {
float density = view.getResources().getDisplayMetrics().density;
double scaleX = (double) view.getWidth() / (dimensions.getWidth() * density);
double scaleY = (double) view.getHeight() / (dimensions.getHeight() * density);
FacesDetectedEvent event = FacesDetectedEvent.obtain(
view.getId(),
faces,
dimensions,
scaleX,
scaleY
);
ReactContext reactContext = (ReactContext) view.getContext();
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
public static void emitFaceDetectionErrorEvent(ViewGroup view, RNFaceDetector faceDetector) {
FaceDetectionErrorEvent event = FaceDetectionErrorEvent.obtain(view.getId(), faceDetector);
ReactContext reactContext = (ReactContext) view.getContext();
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
// Bar code read event
public static void emitBarCodeReadEvent(ViewGroup view, Result barCode) {
BarCodeReadEvent event = BarCodeReadEvent.obtain(view.getId(), barCode);
ReactContext reactContext = (ReactContext) view.getContext();
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
// Utilities
public static int getCorrectCameraRotation(int rotation, int facing) {
if (facing == CameraView.FACING_FRONT) {
return (rotation - 90 + 360) % 360;
} else {
return (-rotation + 90 + 360) % 360;
}
}
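  // For example (illustrative): with a 90-degree sensor rotation this yields
  // (90 - 90 + 360) % 360 = 0 for the front camera and (-90 + 90 + 360) % 360 = 0
  // for the back camera, while a 0-degree rotation maps to 270 (front) and 90 (back).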
public static CamcorderProfile getCamcorderProfile(int quality) {
CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
switch (quality) {
case CameraModule.VIDEO_2160P:
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
profile = CamcorderProfile.get(CamcorderProfile.QUALITY_2160P);
}
break;
case CameraModule.VIDEO_1080P:
profile = CamcorderProfile.get(CamcorderProfile.QUALITY_1080P);
break;
case CameraModule.VIDEO_720P:
profile = CamcorderProfile.get(CamcorderProfile.QUALITY_720P);
break;
case CameraModule.VIDEO_480P:
profile = CamcorderProfile.get(CamcorderProfile.QUALITY_480P);
break;
case CameraModule.VIDEO_4x3:
profile = CamcorderProfile.get(CamcorderProfile.QUALITY_480P);
profile.videoFrameWidth = 640;
break;
}
return profile;
}
public static WritableMap getExifData(ExifInterface exifInterface) {
WritableMap exifMap = Arguments.createMap();
for (String[] tagInfo : exifTags) {
String name = tagInfo[1];
if (exifInterface.getAttribute(name) != null) {
String type = tagInfo[0];
switch (type) {
case "string":
exifMap.putString(name, exifInterface.getAttribute(name));
break;
case "int":
exifMap.putInt(name, exifInterface.getAttributeInt(name, 0));
break;
case "double":
exifMap.putDouble(name, exifInterface.getAttributeDouble(name, 0));
break;
}
}
}
double[] latLong = exifInterface.getLatLong();
if (latLong != null) {
exifMap.putDouble(ExifInterface.TAG_GPS_LATITUDE, latLong[0]);
exifMap.putDouble(ExifInterface.TAG_GPS_LONGITUDE, latLong[1]);
exifMap.putDouble(ExifInterface.TAG_GPS_ALTITUDE, exifInterface.getAltitude(0));
}
return exifMap;
}
public static Bitmap generateSimulatorPhoto(int width, int height) {
Bitmap fakePhoto = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(fakePhoto);
Paint background = new Paint();
background.setColor(Color.BLACK);
canvas.drawRect(0, 0, width, height, background);
Paint textPaint = new Paint();
textPaint.setColor(Color.YELLOW);
textPaint.setTextSize(35);
Calendar calendar = Calendar.getInstance();
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("dd.MM.YY HH:mm:ss", Locale.getDefault());
canvas.drawText(simpleDateFormat.format(calendar.getTime()), width * 0.1f, height * 0.9f, textPaint);
return fakePhoto;
}
}

View File

@ -0,0 +1,68 @@
package org.reactnative.camera.events;
import android.support.v4.util.Pools;
import org.reactnative.camera.CameraViewManager;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import com.google.zxing.Result;
import java.util.Date;
public class BarCodeReadEvent extends Event<BarCodeReadEvent> {
private static final Pools.SynchronizedPool<BarCodeReadEvent> EVENTS_POOL =
new Pools.SynchronizedPool<>(3);
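  // Events are recycled through a small synchronized pool so frequent barcode
  // reads do not allocate a new event object per frame; obtain() below reuses
  // a pooled instance when one is available.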
private Result mBarCode;
private BarCodeReadEvent() {}
public static BarCodeReadEvent obtain(int viewTag, Result barCode) {
BarCodeReadEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new BarCodeReadEvent();
}
event.init(viewTag, barCode);
return event;
}
private void init(int viewTag, Result barCode) {
super.init(viewTag);
mBarCode = barCode;
}
/**
* We want every distinct barcode to be reported to the JS listener.
* If we return some static value as a coalescing key there may be two barcode events
* containing two different barcodes waiting to be transmitted to JS
* that would get coalesced (because both of them would have the same coalescing key).
* So let's differentiate them with a hash of the contents (mod short's max value).
*/
@Override
public short getCoalescingKey() {
int hashCode = mBarCode.getText().hashCode() % Short.MAX_VALUE;
return (short) hashCode;
}
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_ON_BAR_CODE_READ.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), serializeEventData());
}
private WritableMap serializeEventData() {
WritableMap event = Arguments.createMap();
event.putInt("target", getViewTag());
event.putString("data", mBarCode.getText());
event.putString("type", mBarCode.getBarcodeFormat().toString());
return event;
}
}

View File

@ -0,0 +1,44 @@
package org.reactnative.camera.events;
import android.support.v4.util.Pools;
import org.reactnative.camera.CameraViewManager;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import java.util.Date;
public class CameraMountErrorEvent extends Event<CameraMountErrorEvent> {
private static final Pools.SynchronizedPool<CameraMountErrorEvent> EVENTS_POOL = new Pools.SynchronizedPool<>(3);
private CameraMountErrorEvent() {}
public static CameraMountErrorEvent obtain(int viewTag) {
CameraMountErrorEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new CameraMountErrorEvent();
}
event.init(viewTag);
return event;
}
@Override
public short getCoalescingKey() {
return 0;
}
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_ON_MOUNT_ERROR.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), serializeEventData());
}
private WritableMap serializeEventData() {
return Arguments.createMap();
}
}

View File

@ -0,0 +1,44 @@
package org.reactnative.camera.events;
import android.support.v4.util.Pools;
import org.reactnative.camera.CameraViewManager;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import java.util.Date;
public class CameraReadyEvent extends Event<CameraReadyEvent> {
private static final Pools.SynchronizedPool<CameraReadyEvent> EVENTS_POOL = new Pools.SynchronizedPool<>(3);
private CameraReadyEvent() {}
public static CameraReadyEvent obtain(int viewTag) {
CameraReadyEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new CameraReadyEvent();
}
event.init(viewTag);
return event;
}
@Override
public short getCoalescingKey() {
return 0;
}
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_CAMERA_READY.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), serializeEventData());
}
private WritableMap serializeEventData() {
return Arguments.createMap();
}
}

View File

@ -0,0 +1,53 @@
package org.reactnative.camera.events;
import android.support.v4.util.Pools;
import org.reactnative.camera.CameraViewManager;
import org.reactnative.facedetector.RNFaceDetector;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import java.util.Date;
public class FaceDetectionErrorEvent extends Event<FaceDetectionErrorEvent> {
private static final Pools.SynchronizedPool<FaceDetectionErrorEvent> EVENTS_POOL = new Pools.SynchronizedPool<>(3);
private RNFaceDetector mFaceDetector;
private FaceDetectionErrorEvent() {}
public static FaceDetectionErrorEvent obtain(int viewTag, RNFaceDetector faceDetector) {
FaceDetectionErrorEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new FaceDetectionErrorEvent();
}
event.init(viewTag, faceDetector);
return event;
}
private void init(int viewTag, RNFaceDetector faceDetector) {
super.init(viewTag);
mFaceDetector = faceDetector;
}
@Override
public short getCoalescingKey() {
return 0;
}
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_ON_FACE_DETECTION_ERROR.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), serializeEventData());
}
private WritableMap serializeEventData() {
WritableMap map = Arguments.createMap();
map.putBoolean("isOperational", mFaceDetector != null && mFaceDetector.isOperational());
return map;
}
}

View File

@ -0,0 +1,103 @@
package org.reactnative.camera.events;
import android.support.v4.util.Pools;
import android.util.SparseArray;
import org.reactnative.camera.CameraViewManager;
import org.reactnative.camera.utils.ImageDimensions;
import org.reactnative.facedetector.FaceDetectorUtils;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import com.google.android.cameraview.CameraView;
import com.google.android.gms.vision.face.Face;
import java.util.Date;
public class FacesDetectedEvent extends Event<FacesDetectedEvent> {
private static final Pools.SynchronizedPool<FacesDetectedEvent> EVENTS_POOL =
new Pools.SynchronizedPool<>(3);
private double mScaleX;
private double mScaleY;
private SparseArray<Face> mFaces;
private ImageDimensions mImageDimensions;
private FacesDetectedEvent() {}
public static FacesDetectedEvent obtain(
int viewTag,
SparseArray<Face> faces,
ImageDimensions dimensions,
double scaleX,
double scaleY
) {
FacesDetectedEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new FacesDetectedEvent();
}
event.init(viewTag, faces, dimensions, scaleX, scaleY);
return event;
}
private void init(
int viewTag,
SparseArray<Face> faces,
ImageDimensions dimensions,
double scaleX,
double scaleY
) {
super.init(viewTag);
mFaces = faces;
mImageDimensions = dimensions;
mScaleX = scaleX;
mScaleY = scaleY;
}
/**
* note(@sjchmiela)
* Should the events about detected faces coalesce, the best strategy will be
* to ensure that events with different faces count are always being transmitted.
*/
@Override
public short getCoalescingKey() {
if (mFaces.size() > Short.MAX_VALUE) {
return Short.MAX_VALUE;
}
return (short) mFaces.size();
}
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_ON_FACES_DETECTED.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), serializeEventData());
}
private WritableMap serializeEventData() {
WritableArray facesList = Arguments.createArray();
for (int i = 0; i < mFaces.size(); i++) {
Face face = mFaces.valueAt(i);
WritableMap serializedFace = FaceDetectorUtils.serializeFace(face, mScaleX, mScaleY);
if (mImageDimensions.getFacing() == CameraView.FACING_FRONT) {
serializedFace = FaceDetectorUtils.rotateFaceX(serializedFace, mImageDimensions.getWidth(), mScaleX);
} else {
serializedFace = FaceDetectorUtils.changeAnglesDirection(serializedFace);
}
facesList.pushMap(serializedFace);
}
WritableMap event = Arguments.createMap();
event.putString("type", "face");
event.putArray("faces", facesList);
event.putInt("target", getViewTag());
return event;
}
}

View File

@ -0,0 +1,74 @@
package org.reactnative.camera.tasks;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.NotFoundException;
import com.google.zxing.PlanarYUVLuminanceSource;
import com.google.zxing.Result;
import com.google.zxing.common.HybridBinarizer;
public class BarCodeScannerAsyncTask extends android.os.AsyncTask<Void, Void, Result> {
private byte[] mImageData;
private int mWidth;
private int mHeight;
private BarCodeScannerAsyncTaskDelegate mDelegate;
private final MultiFormatReader mMultiFormatReader;
// note(sjchmiela): From my short research it's ok to ignore rotation of the image.
public BarCodeScannerAsyncTask(
BarCodeScannerAsyncTaskDelegate delegate,
MultiFormatReader multiFormatReader,
byte[] imageData,
int width,
int height
) {
mImageData = imageData;
mWidth = width;
mHeight = height;
mDelegate = delegate;
mMultiFormatReader = multiFormatReader;
}
@Override
protected Result doInBackground(Void... ignored) {
if (isCancelled() || mDelegate == null) {
return null;
}
Result result = null;
try {
BinaryBitmap bitmap = generateBitmapFromImageData(mImageData, mWidth, mHeight);
result = mMultiFormatReader.decodeWithState(bitmap);
} catch (NotFoundException e) {
// No barcode found, result is already null.
} catch (Throwable t) {
t.printStackTrace();
}
return result;
}
@Override
protected void onPostExecute(Result result) {
super.onPostExecute(result);
if (result != null) {
mDelegate.onBarCodeRead(result);
}
mDelegate.onBarCodeScanningTaskCompleted();
}
private BinaryBitmap generateBitmapFromImageData(byte[] imageData, int width, int height) {
PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(
imageData, // byte[] yuvData
width, // int dataWidth
height, // int dataHeight
0, // int left
0, // int top
width, // int width
height, // int height
false // boolean reverseHorizontal
);
return new BinaryBitmap(new HybridBinarizer(source));
}
}

View File

@ -0,0 +1,8 @@
package org.reactnative.camera.tasks;
import com.google.zxing.Result;
public interface BarCodeScannerAsyncTaskDelegate {
void onBarCodeRead(Result barCode);
void onBarCodeScanningTaskCompleted();
}

View File

@ -0,0 +1,55 @@
package org.reactnative.camera.tasks;
import android.util.SparseArray;
import org.reactnative.facedetector.RNFaceDetector;
import org.reactnative.facedetector.RNFrame;
import org.reactnative.facedetector.RNFrameFactory;
import com.google.android.gms.vision.face.Face;
public class FaceDetectorAsyncTask extends android.os.AsyncTask<Void, Void, SparseArray<Face>> {
private byte[] mImageData;
private int mWidth;
private int mHeight;
private int mRotation;
private RNFaceDetector mFaceDetector;
private FaceDetectorAsyncTaskDelegate mDelegate;
public FaceDetectorAsyncTask(
FaceDetectorAsyncTaskDelegate delegate,
RNFaceDetector faceDetector,
byte[] imageData,
int width,
int height,
int rotation
) {
mImageData = imageData;
mWidth = width;
mHeight = height;
mRotation = rotation;
mDelegate = delegate;
mFaceDetector = faceDetector;
}
@Override
protected SparseArray<Face> doInBackground(Void... ignored) {
if (isCancelled() || mDelegate == null || mFaceDetector == null || !mFaceDetector.isOperational()) {
return null;
}
RNFrame frame = RNFrameFactory.buildFrame(mImageData, mWidth, mHeight, mRotation);
return mFaceDetector.detect(frame);
}
@Override
protected void onPostExecute(SparseArray<Face> faces) {
super.onPostExecute(faces);
if (faces == null) {
mDelegate.onFaceDetectionError(mFaceDetector);
} else {
mDelegate.onFacesDetected(faces, mWidth, mHeight, mRotation);
mDelegate.onFaceDetectingTaskCompleted();
}
}
}

View File

@ -0,0 +1,12 @@
package org.reactnative.camera.tasks;
import android.util.SparseArray;
import org.reactnative.facedetector.RNFaceDetector;
import com.google.android.gms.vision.face.Face;
public interface FaceDetectorAsyncTaskDelegate {
void onFacesDetected(SparseArray<Face> face, int sourceWidth, int sourceHeight, int sourceRotation);
void onFaceDetectionError(RNFaceDetector faceDetector);
void onFaceDetectingTaskCompleted();
}

View File

@ -0,0 +1,203 @@
package org.reactnative.camera.tasks;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.net.Uri;
import android.os.AsyncTask;
import android.support.media.ExifInterface;
import android.util.Base64;
import org.reactnative.camera.RNCameraViewHelper;
import org.reactnative.camera.utils.RNFileUtils;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableMap;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
public class ResolveTakenPictureAsyncTask extends AsyncTask<Void, Void, WritableMap> {
private static final String ERROR_TAG = "E_TAKING_PICTURE_FAILED";
private Promise mPromise;
private byte[] mImageData;
private ReadableMap mOptions;
private File mCacheDirectory;
private Bitmap mBitmap;
public ResolveTakenPictureAsyncTask(byte[] imageData, Promise promise, ReadableMap options) {
mPromise = promise;
mOptions = options;
mImageData = imageData;
}
public ResolveTakenPictureAsyncTask(byte[] imageData, Promise promise, ReadableMap options, File cacheDirectory) {
mPromise = promise;
mOptions = options;
mImageData = imageData;
mCacheDirectory = cacheDirectory;
}
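  // Converts the 0..1 "quality" option into the 0..100 scale expected by Bitmap.compress().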
private int getQuality() {
return (int) (mOptions.getDouble("quality") * 100);
}
@Override
protected WritableMap doInBackground(Void... voids) {
WritableMap response = Arguments.createMap();
ByteArrayInputStream inputStream = null;
// we need the stream only for photos from a device
if (mBitmap == null) {
mBitmap = BitmapFactory.decodeByteArray(mImageData, 0, mImageData.length);
inputStream = new ByteArrayInputStream(mImageData);
}
try {
if (inputStream != null) {
ExifInterface exifInterface = new ExifInterface(inputStream);
// Get orientation of the image from mImageData via inputStream
int orientation = exifInterface.getAttributeInt(
ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_UNDEFINED
);
// Rotate the bitmap to the proper orientation if needed
if (orientation != ExifInterface.ORIENTATION_UNDEFINED) {
mBitmap = rotateBitmap(mBitmap, getImageRotation(orientation));
}
if (mOptions.hasKey("mirrorImage") && mOptions.getBoolean("mirrorImage")) {
mBitmap = flipHorizontally(mBitmap);
}
// Write Exif data to the response if requested
if (mOptions.hasKey("exif") && mOptions.getBoolean("exif")) {
WritableMap exifData = RNCameraViewHelper.getExifData(exifInterface);
response.putMap("exif", exifData);
}
}
// Upon rotating, write the image's dimensions to the response
response.putInt("width", mBitmap.getWidth());
response.putInt("height", mBitmap.getHeight());
// Cache compressed image in imageStream
ByteArrayOutputStream imageStream = new ByteArrayOutputStream();
mBitmap.compress(Bitmap.CompressFormat.JPEG, getQuality(), imageStream);
// Write compressed image to file in cache directory
String filePath = writeStreamToFile(imageStream);
File imageFile = new File(filePath);
String fileUri = Uri.fromFile(imageFile).toString();
response.putString("uri", fileUri);
// Write base64-encoded image to the response if requested
if (mOptions.hasKey("base64") && mOptions.getBoolean("base64")) {
response.putString("base64", Base64.encodeToString(imageStream.toByteArray(), Base64.DEFAULT));
}
// Cleanup
imageStream.close();
if (inputStream != null) {
inputStream.close();
inputStream = null;
}
return response;
} catch (Resources.NotFoundException e) {
mPromise.reject(ERROR_TAG, "Documents directory of the app could not be found.", e);
e.printStackTrace();
} catch (IOException e) {
mPromise.reject(ERROR_TAG, "An unknown I/O exception has occurred.", e);
e.printStackTrace();
} finally {
try {
if (inputStream != null) {
inputStream.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
// An exception had to occur, promise has already been rejected. Do not try to resolve it again.
return null;
}
private Bitmap rotateBitmap(Bitmap source, int angle) {
Matrix matrix = new Matrix();
matrix.postRotate(angle);
return Bitmap.createBitmap(source, 0, 0, source.getWidth(), source.getHeight(), matrix, true);
}
private Bitmap flipHorizontally(Bitmap source) {
Matrix matrix = new Matrix();
matrix.preScale(-1.0f, 1.0f);
return Bitmap.createBitmap(source, 0, 0, source.getWidth(), source.getHeight(), matrix, true);
}
// Get rotation degrees from Exif orientation enum
private int getImageRotation(int orientation) {
int rotationDegrees = 0;
switch (orientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
rotationDegrees = 90;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
rotationDegrees = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_270:
rotationDegrees = 270;
break;
}
return rotationDegrees;
}
private String writeStreamToFile(ByteArrayOutputStream inputStream) throws IOException {
String outputPath = null;
IOException exception = null;
FileOutputStream outputStream = null;
try {
outputPath = RNFileUtils.getOutputFilePath(mCacheDirectory, ".jpg");
outputStream = new FileOutputStream(outputPath);
inputStream.writeTo(outputStream);
} catch (IOException e) {
e.printStackTrace();
exception = e;
} finally {
try {
if (outputStream != null) {
outputStream.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
if (exception != null) {
throw exception;
}
return outputPath;
}
@Override
protected void onPostExecute(WritableMap response) {
super.onPostExecute(response);
// If the response is not null everything went well and we can resolve the promise.
if (response != null) {
mPromise.resolve(response);
}
}
}

View File

@ -0,0 +1,64 @@
package org.reactnative.camera.utils;
public class ImageDimensions {
private int mWidth;
private int mHeight;
private int mFacing;
private int mRotation;
public ImageDimensions(int width, int height) {
this(width, height, 0);
}
public ImageDimensions(int width, int height, int rotation) {
this(width, height, rotation, -1);
}
public ImageDimensions(int width, int height, int rotation, int facing) {
mWidth = width;
mHeight = height;
mFacing = facing;
mRotation = rotation;
}
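  // A rotation of 90 or 270 degrees swaps the reported width and height.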
public boolean isLandscape() {
return mRotation % 180 == 90;
}
public int getWidth() {
if (isLandscape()) {
return mHeight;
}
return mWidth;
}
public int getHeight() {
if (isLandscape()) {
return mWidth;
}
return mHeight;
}
public int getRotation() {
return mRotation;
}
public int getFacing() {
return mFacing;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof ImageDimensions) {
ImageDimensions otherDimensions = (ImageDimensions) obj;
return (otherDimensions.getWidth() == getWidth() &&
otherDimensions.getHeight() == getHeight() &&
otherDimensions.getFacing() == getFacing() &&
otherDimensions.getRotation() == getRotation());
} else {
return super.equals(obj);
}
}
}

View File

@ -0,0 +1,34 @@
package org.reactnative.camera.utils;
import android.content.Context;
import android.net.Uri;
import java.io.File;
import java.io.IOException;
import java.util.UUID;
/**
* Created by jgfidelis on 23/01/18.
*/
public class RNFileUtils {
public static File ensureDirExists(File dir) throws IOException {
if (!(dir.isDirectory() || dir.mkdirs())) {
throw new IOException("Couldn't create directory '" + dir + "'");
}
return dir;
}
public static String getOutputFilePath(File directory, String extension) throws IOException {
ensureDirExists(directory);
String filename = UUID.randomUUID().toString();
return directory + File.separator + filename + extension;
}
public static Uri uriFromFile(File file) {
return Uri.fromFile(file);
}
}

View File

@ -0,0 +1,27 @@
package org.reactnative.camera.utils;
import android.content.Context;
import java.io.File;
/**
* Created by jgfidelis on 23/01/18.
*/
public class ScopedContext {
private File cacheDirectory = null;
public ScopedContext(Context context) {
createCacheDirectory(context);
}
public void createCacheDirectory(Context context) {
cacheDirectory = new File(context.getCacheDir() + "/Camera/");
}
public File getCacheDirectory() {
return cacheDirectory;
}
}

View File

@ -0,0 +1,76 @@
package org.reactnative.facedetector;
import android.content.Context;
import org.reactnative.facedetector.tasks.FileFaceDetectionAsyncTask;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableMap;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Nullable;
public class FaceDetectorModule extends ReactContextBaseJavaModule {
private static final String TAG = "RNFaceDetector";
// private ScopedContext mScopedContext;
private static ReactApplicationContext mScopedContext;
public FaceDetectorModule(ReactApplicationContext reactContext) {
super(reactContext);
mScopedContext = reactContext;
}
@Override
public String getName() {
return TAG;
}
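  // Exposes the Mode/Landmarks/Classifications maps to JS as module constants.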
@Nullable
@Override
public Map<String, Object> getConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("Mode", getFaceDetectionModeConstants());
put("Landmarks", getFaceDetectionLandmarksConstants());
put("Classifications", getFaceDetectionClassificationsConstants());
}
private Map<String, Object> getFaceDetectionModeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("fast", RNFaceDetector.FAST_MODE);
put("accurate", RNFaceDetector.ACCURATE_MODE);
}
});
}
private Map<String, Object> getFaceDetectionClassificationsConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("all", RNFaceDetector.ALL_CLASSIFICATIONS);
put("none", RNFaceDetector.NO_CLASSIFICATIONS);
}
});
}
private Map<String, Object> getFaceDetectionLandmarksConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("all", RNFaceDetector.ALL_LANDMARKS);
put("none", RNFaceDetector.NO_LANDMARKS);
}
});
}
});
}
@ReactMethod
public void detectFaces(ReadableMap options, final Promise promise) {
new FileFaceDetectionAsyncTask(mScopedContext, options, promise).execute();
}
}

View File

@ -0,0 +1,120 @@
package org.reactnative.facedetector;
import android.graphics.PointF;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableMap;
import com.google.android.gms.vision.face.Face;
import com.google.android.gms.vision.face.Landmark;
public class FaceDetectorUtils {
// All the landmarks reported by Google Mobile Vision in constants' order.
// https://developers.google.com/android/reference/com/google/android/gms/vision/face/Landmark
private static final String[] landmarkNames = {
"bottomMouthPosition", "leftCheekPosition", "leftEarPosition", "leftEarTipPosition",
"leftEyePosition", "leftMouthPosition", "noseBasePosition", "rightCheekPosition",
"rightEarPosition", "rightEarTipPosition", "rightEyePosition", "rightMouthPosition"
};
public static WritableMap serializeFace(Face face) {
return serializeFace(face, 1, 1);
}
public static WritableMap serializeFace(Face face, double scaleX, double scaleY) {
WritableMap encodedFace = Arguments.createMap();
encodedFace.putInt("faceID", face.getId());
encodedFace.putDouble("rollAngle", face.getEulerZ());
encodedFace.putDouble("yawAngle", face.getEulerY());
if (face.getIsSmilingProbability() >= 0) {
encodedFace.putDouble("smilingProbability", face.getIsSmilingProbability());
}
if (face.getIsLeftEyeOpenProbability() >= 0) {
encodedFace.putDouble("leftEyeOpenProbability", face.getIsLeftEyeOpenProbability());
}
if (face.getIsRightEyeOpenProbability() >= 0) {
encodedFace.putDouble("rightEyeOpenProbability", face.getIsRightEyeOpenProbability());
}
for(Landmark landmark : face.getLandmarks()) {
encodedFace.putMap(landmarkNames[landmark.getType()], mapFromPoint(landmark.getPosition(), scaleX, scaleY));
}
WritableMap origin = Arguments.createMap();
origin.putDouble("x", face.getPosition().x * scaleX);
origin.putDouble("y", face.getPosition().y * scaleY);
WritableMap size = Arguments.createMap();
size.putDouble("width", face.getWidth() * scaleX);
size.putDouble("height", face.getHeight() * scaleY);
WritableMap bounds = Arguments.createMap();
bounds.putMap("origin", origin);
bounds.putMap("size", size);
encodedFace.putMap("bounds", bounds);
return encodedFace;
}
public static WritableMap rotateFaceX(WritableMap face, int sourceWidth, double scaleX) {
ReadableMap faceBounds = face.getMap("bounds");
ReadableMap oldOrigin = faceBounds.getMap("origin");
WritableMap mirroredOrigin = positionMirroredHorizontally(oldOrigin, sourceWidth, scaleX);
double translateX = -faceBounds.getMap("size").getDouble("width");
WritableMap translatedMirroredOrigin = positionTranslatedHorizontally(mirroredOrigin, translateX);
WritableMap newBounds = Arguments.createMap();
newBounds.merge(faceBounds);
newBounds.putMap("origin", translatedMirroredOrigin);
for (String landmarkName : landmarkNames) {
ReadableMap landmark = face.hasKey(landmarkName) ? face.getMap(landmarkName) : null;
if (landmark != null) {
WritableMap mirroredPosition = positionMirroredHorizontally(landmark, sourceWidth, scaleX);
face.putMap(landmarkName, mirroredPosition);
}
}
face.putMap("bounds", newBounds);
return face;
}
public static WritableMap changeAnglesDirection(WritableMap face) {
face.putDouble("rollAngle", (-face.getDouble("rollAngle") + 360) % 360);
face.putDouble("yawAngle", (-face.getDouble("yawAngle") + 360) % 360);
return face;
}
public static WritableMap mapFromPoint(PointF point, double scaleX, double scaleY) {
WritableMap map = Arguments.createMap();
map.putDouble("x", point.x * scaleX);
map.putDouble("y", point.y * scaleY);
return map;
}
public static WritableMap positionTranslatedHorizontally(ReadableMap position, double translateX) {
WritableMap newPosition = Arguments.createMap();
newPosition.merge(position);
newPosition.putDouble("x", position.getDouble("x") + translateX);
return newPosition;
}
public static WritableMap positionMirroredHorizontally(ReadableMap position, int containerWidth, double scaleX) {
WritableMap newPosition = Arguments.createMap();
newPosition.merge(position);
newPosition.putDouble("x", valueMirroredHorizontally(position.getDouble("x"), containerWidth, scaleX));
return newPosition;
}
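  // Mirrors a scaled x coordinate around the container's center line:
  // unscale, reflect within the container width, then re-apply the scale.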
public static double valueMirroredHorizontally(double elementX, int containerWidth, double scaleX) {
double originalX = elementX / scaleX;
double mirroredX = containerWidth - originalX;
return mirroredX * scaleX;
}
}

View File

@ -0,0 +1,113 @@
package org.reactnative.facedetector;
import android.content.Context;
import android.util.Log;
import android.util.SparseArray;
import org.reactnative.camera.utils.ImageDimensions;
import com.google.android.gms.vision.face.Face;
import com.google.android.gms.vision.face.FaceDetector;
public class RNFaceDetector {
public static int ALL_CLASSIFICATIONS = FaceDetector.ALL_CLASSIFICATIONS;
public static int NO_CLASSIFICATIONS = FaceDetector.NO_CLASSIFICATIONS;
public static int ALL_LANDMARKS = FaceDetector.ALL_LANDMARKS;
public static int NO_LANDMARKS = FaceDetector.NO_LANDMARKS;
public static int ACCURATE_MODE = FaceDetector.ACCURATE_MODE;
public static int FAST_MODE = FaceDetector.FAST_MODE;
private FaceDetector mFaceDetector = null;
private ImageDimensions mPreviousDimensions;
private FaceDetector.Builder mBuilder = null;
private int mClassificationType = NO_CLASSIFICATIONS;
private int mLandmarkType = NO_LANDMARKS;
private float mMinFaceSize = 0.15f;
private int mMode = FAST_MODE;
public RNFaceDetector(Context context) {
mBuilder = new FaceDetector.Builder(context);
mBuilder.setMinFaceSize(mMinFaceSize);
mBuilder.setMode(mMode);
mBuilder.setLandmarkType(mLandmarkType);
mBuilder.setClassificationType(mClassificationType);
}
// Public API
public boolean isOperational() {
if (mFaceDetector == null) {
createFaceDetector();
}
return mFaceDetector.isOperational();
}
public SparseArray<Face> detect(RNFrame frame) {
// If the frame has different dimensions, create another face detector.
// Otherwise we will get nasty "inconsistent image dimensions" error from detector
// and no face will be detected.
if (!frame.getDimensions().equals(mPreviousDimensions)) {
releaseFaceDetector();
}
if (mFaceDetector == null) {
createFaceDetector();
mPreviousDimensions = frame.getDimensions();
}
return mFaceDetector.detect(frame.getFrame());
}
public void setTracking(boolean trackingEnabled) {
release();
mBuilder.setTrackingEnabled(trackingEnabled);
}
public void setClassificationType(int classificationType) {
if (classificationType != mClassificationType) {
release();
mBuilder.setClassificationType(classificationType);
mClassificationType = classificationType;
}
}
public void setLandmarkType(int landmarkType) {
if (landmarkType != mLandmarkType) {
release();
mBuilder.setLandmarkType(landmarkType);
mLandmarkType = landmarkType;
}
}
public void setMode(int mode) {
if (mode != mMode) {
release();
mBuilder.setMode(mode);
mMode = mode;
}
}
public void setTrackingEnabled(boolean tracking) {
release();
mBuilder.setTrackingEnabled(tracking);
}
public void release() {
releaseFaceDetector();
mPreviousDimensions = null;
}
// Lifecycle methods
private void releaseFaceDetector() {
if (mFaceDetector != null) {
mFaceDetector.release();
mFaceDetector = null;
}
}
private void createFaceDetector() {
mFaceDetector = mBuilder.build();
}
}

View File

@ -0,0 +1,28 @@
package org.reactnative.facedetector;
import org.reactnative.camera.utils.ImageDimensions;
import com.google.android.gms.vision.Frame;
/**
* Wrapper around Frame allowing us to track Frame dimensions.
* Tracking dimensions is used in RNFaceDetector to provide painless FaceDetector recreation
* when image dimensions change.
*/
public class RNFrame {
private Frame mFrame;
private ImageDimensions mDimensions;
public RNFrame(Frame frame, ImageDimensions dimensions) {
mFrame = frame;
mDimensions = dimensions;
}
public Frame getFrame() {
return mFrame;
}
public ImageDimensions getDimensions() {
return mDimensions;
}
}

View File

@ -0,0 +1,43 @@
package org.reactnative.facedetector;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import org.reactnative.camera.utils.ImageDimensions;
import com.google.android.gms.vision.Frame;
import java.nio.ByteBuffer;
public class RNFrameFactory {
public static RNFrame buildFrame(byte[] bitmapData, int width, int height, int rotation) {
Frame.Builder builder = new Frame.Builder();
ByteBuffer byteBuffer = ByteBuffer.wrap(bitmapData);
builder.setImageData(byteBuffer, width, height, ImageFormat.NV21);
switch (rotation) {
case 90:
builder.setRotation(Frame.ROTATION_90);
break;
case 180:
builder.setRotation(Frame.ROTATION_180);
break;
case 270:
builder.setRotation(Frame.ROTATION_270);
break;
default:
builder.setRotation(Frame.ROTATION_0);
}
ImageDimensions dimensions = new ImageDimensions(width, height, rotation);
return new RNFrame(builder.build(), dimensions);
}
public static RNFrame buildFrame(Bitmap bitmap) {
Frame.Builder builder = new Frame.Builder();
builder.setBitmap(bitmap);
ImageDimensions dimensions = new ImageDimensions(bitmap.getWidth(), bitmap.getHeight());
return new RNFrame(builder.build(), dimensions);
}
}

View File

@ -0,0 +1,153 @@
package org.reactnative.facedetector.tasks;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.AsyncTask;
import android.util.Log;
import android.util.SparseArray;
import org.reactnative.facedetector.RNFaceDetector;
import org.reactnative.facedetector.RNFrame;
import org.reactnative.facedetector.RNFrameFactory;
import org.reactnative.facedetector.FaceDetectorUtils;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.google.android.gms.vision.Frame;
import com.google.android.gms.vision.face.Face;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
public class FileFaceDetectionAsyncTask extends AsyncTask<Void, Void, SparseArray<Face>> {
private static final String ERROR_TAG = "E_FACE_DETECTION_FAILED";
private static final String MODE_OPTION_KEY = "mode";
private static final String DETECT_LANDMARKS_OPTION_KEY = "detectLandmarks";
private static final String RUN_CLASSIFICATIONS_OPTION_KEY = "runClassifications";
private String mUri;
private String mPath;
private Promise mPromise;
private int mWidth = 0;
private int mHeight = 0;
private Context mContext;
private ReadableMap mOptions;
private int mOrientation = ExifInterface.ORIENTATION_UNDEFINED;
private RNFaceDetector mRNFaceDetector;
public FileFaceDetectionAsyncTask(Context context, ReadableMap options, Promise promise) {
mUri = options.getString("uri");
mPromise = promise;
mOptions = options;
mContext = context;
}
@Override
protected void onPreExecute() {
if (mUri == null) {
mPromise.reject(ERROR_TAG, "You have to provide an URI of an image.");
cancel(true);
return;
}
Uri uri = Uri.parse(mUri);
mPath = uri.getPath();
if (mPath == null) {
mPromise.reject(ERROR_TAG, "Invalid URI provided: `" + mUri + "`.");
cancel(true);
return;
}
// We have to check if the requested image is in a directory safely accessible by our app.
boolean fileIsInSafeDirectories =
mPath.startsWith(mContext.getCacheDir().getPath()) || mPath.startsWith(mContext.getFilesDir().getPath());
if (!fileIsInSafeDirectories) {
mPromise.reject(ERROR_TAG, "The image has to be in the local app's directories.");
cancel(true);
return;
}
if(!new File(mPath).exists()) {
mPromise.reject(ERROR_TAG, "The file does not exist. Given path: `" + mPath + "`.");
cancel(true);
}
}
@Override
protected SparseArray<Face> doInBackground(Void... voids) {
if (isCancelled()) {
return null;
}
mRNFaceDetector = detectorForOptions(mOptions, mContext);
Bitmap bitmap = BitmapFactory.decodeFile(mPath);
mWidth = bitmap.getWidth();
mHeight = bitmap.getHeight();
try {
ExifInterface exif = new ExifInterface(mPath);
mOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_UNDEFINED);
} catch (IOException e) {
Log.e(ERROR_TAG, "Reading orientation from file `" + mPath + "` failed.", e);
}
RNFrame frame = RNFrameFactory.buildFrame(bitmap);
return mRNFaceDetector.detect(frame);
}
@Override
protected void onPostExecute(SparseArray<Face> faces) {
super.onPostExecute(faces);
WritableMap result = Arguments.createMap();
WritableArray facesArray = Arguments.createArray();
for(int i = 0; i < faces.size(); i++) {
Face face = faces.valueAt(i);
WritableMap encodedFace = FaceDetectorUtils.serializeFace(face);
encodedFace.putDouble("yawAngle", (-encodedFace.getDouble("yawAngle") + 360) % 360);
encodedFace.putDouble("rollAngle", (-encodedFace.getDouble("rollAngle") + 360) % 360);
facesArray.pushMap(encodedFace);
}
result.putArray("faces", facesArray);
WritableMap image = Arguments.createMap();
image.putInt("width", mWidth);
image.putInt("height", mHeight);
image.putInt("orientation", mOrientation);
image.putString("uri", mUri);
result.putMap("image", image);
mRNFaceDetector.release();
mPromise.resolve(result);
}
private static RNFaceDetector detectorForOptions(ReadableMap options, Context context) {
RNFaceDetector detector = new RNFaceDetector(context);
detector.setTrackingEnabled(false);
if(options.hasKey(MODE_OPTION_KEY)) {
detector.setMode(options.getInt(MODE_OPTION_KEY));
}
if(options.hasKey(RUN_CLASSIFICATIONS_OPTION_KEY)) {
detector.setClassificationType(options.getInt(RUN_CLASSIFICATIONS_OPTION_KEY));
}
if(options.hasKey(DETECT_LANDMARKS_OPTION_KEY)) {
detector.setLandmarkType(options.getInt(DETECT_LANDMARKS_OPTION_KEY));
}
return detector;
}
}

30
commitlint.config.js Normal file
View File

@ -0,0 +1,30 @@
module.exports = {
rules: {
'body-leading-blank': [1, 'always'],
'footer-leading-blank': [1, 'always'],
'header-max-length': [2, 'always', 72],
'scope-case': [2, 'always', 'lower-case'],
'subject-case': [2, 'never', ['sentence-case', 'start-case', 'pascal-case', 'upper-case']],
'subject-empty': [2, 'never'],
'subject-full-stop': [2, 'never', '.'],
'type-case': [2, 'always', 'lower-case'],
'type-empty': [2, 'never'],
'type-enum': [
2,
'always',
[
'build',
'chore',
'ci',
'docs',
'feat',
'fix',
'perf',
'refactor',
'revert',
'style',
'test',
],
],
},
};

60
docs/QA.md Normal file
View File

@ -0,0 +1,60 @@
## Q & A
#### meta-data android 26
```
AndroidManifest.xml:25:13-35 Error:
Attribute meta-data#android.support.VERSION@value value=(26.0.2) from [com.android.support:exifinterface:26.0.2] Android
Manifest.xml:25:13-35
is also present at [com.android.support:support-v4:26.0.1] AndroidManifest.xml:28:13-35 value=(26.0.1).
Suggestion: add 'tools:replace="android:value"' to <meta-data> element at AndroidManifest.xml:23:9-25:38 to override.
```
Add this to your AndroidManifest.xml:
- [ ] xmlns:tools="http://schemas.android.com/tools"
```xml
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
```
- [ ] tools:node="replace"
```xml
<application
android:name=".MainApplication"
android:allowBackup="true"
android:label="@string/app_name"
android:icon="@mipmap/ic_launcher"
android:theme="@style/AppTheme"
tools:node="replace"
>
```
#### When I try to build my project, I get the following error:
```
Execution failed for task ':app:processDebugManifest'.
> Manifest merger failed : Attribute meta-data#android.support.VERSION@value value=(26.0.2) from [com.android.support:exifinterface:26.0.2] AndroidManifest.xml:25:13-35
is also present at [com.android.support:support-v4:26.0.1] AndroidManifest.xml:28:13-35 value=(26.0.1).
Suggestion: add 'tools:replace="android:value"' to <meta-data> element at AndroidManifest.xml:23:9-25:38 to override.
```
As the error message hints, `com.android.support:exifinterface:26.0.2` is already included in `com.android.support:support-v4:26.0.1`.
To fix this issue, modify your project's `android/app/build.gradle` as follows:
```Gradle
dependencies {
compile (project(':react-native-camera')) {
exclude group: "com.android.support"
// uncomment this if also com.google.android.gms:play-services-vision versions are conflicting
// this can happen if you use react-native-firebase
// exclude group: "com.google.android.gms"
}
compile ('com.android.support:exifinterface:26.0.1') {
force = true;
}
// uncomment this if you uncommented the previous line
// compile ('com.google.android.gms:play-services-vision:11.6.0') {
// force = true;
// }
}
```

393
docs/RCTCamera.md Normal file
View File

@ -0,0 +1,393 @@
# RCTCamera
As of the 1.0.0 release, RCTCamera is deprecated. Please use RNCamera for the latest fixes and improvements.
## Usage
All you need is to `import` `Camera` from the `react-native-camera` module and then use the
`<Camera/>` tag.
```javascript
'use strict';
import React, { Component } from 'react';
import {
AppRegistry,
Dimensions,
StyleSheet,
Text,
TouchableHighlight,
View
} from 'react-native';
import Camera from 'react-native-camera';
class BadInstagramCloneApp extends Component {
render() {
return (
<View style={styles.container}>
<Camera
ref={(cam) => {
this.camera = cam;
}}
onBarCodeRead={this.onBarCodeRead.bind(this)}
style={styles.preview}
aspect={Camera.constants.Aspect.fill}>
<Text style={styles.capture} onPress={this.takePicture.bind(this)}>[CAPTURE]</Text>
</Camera>
</View>
);
}
onBarCodeRead(e) {
console.log(
"Barcode Found!",
"Type: " + e.type + "\nData: " + e.data
);
}
takePicture() {
const options = {};
//options.location = ...
this.camera.capture({metadata: options})
.then((data) => console.log(data))
.catch(err => console.error(err));
}
}
const styles = StyleSheet.create({
container: {
flex: 1,
flexDirection: 'row',
},
preview: {
flex: 1,
justifyContent: 'flex-end',
alignItems: 'center'
},
capture: {
flex: 0,
backgroundColor: '#fff',
borderRadius: 5,
color: '#000',
padding: 10,
margin: 40
}
});
AppRegistry.registerComponent('BadInstagramCloneApp', () => BadInstagramCloneApp);
```
## Properties
#### `aspect`
Values: `Camera.constants.Aspect.fit` or `"fit"`, `Camera.constants.Aspect.fill` or `"fill"` (default), `Camera.constants.Aspect.stretch` or `"stretch"`
The `aspect` property allows you to define how your viewfinder renders the camera's view. For instance, if you have a square viewfinder and you want to fill it entirely, you have two options: `"fill"`, where the aspect ratio of the camera's view is preserved by cropping the view or `"stretch"`, where the aspect ratio is skewed in order to fit the entire image inside the viewfinder. The other option is `"fit"`, which ensures the camera's entire view fits inside your viewfinder without altering the aspect ratio.
#### `cropToPreview`
Values: `true` or `false` (default)
Will crop the captured image to match the content displayed in the preview view. Works on both `Android` and `iOS`. Ignored if `captureMode` is anything other than `Camera.constants.CaptureMode.still`.
#### `iOS` `captureAudio`
Values: `true` (Boolean), `false` (default)
*Applies to video capture mode only.* Specifies whether or not audio should be captured with the video.
#### `captureMode`
Values: `Camera.constants.CaptureMode.still` (default), `Camera.constants.CaptureMode.video`
The type of capture that will be performed by the camera - either a still image or video.
#### `captureTarget`
Values: `Camera.constants.CaptureTarget.cameraRoll` (default), `Camera.constants.CaptureTarget.disk`, `Camera.constants.CaptureTarget.temp`, ~~`Camera.constants.CaptureTarget.memory`~~ (deprecated)
This property allows you to specify the target output of the captured image data. The disk output has been shown to improve capture response time, so that is the recommended value. When using the deprecated memory output, the image binary is sent back as a base64-encoded string.
#### `captureQuality`
Values: `Camera.constants.CaptureQuality.high` or `"high"` (default), `Camera.constants.CaptureQuality.medium` or `"medium"`, `Camera.constants.CaptureQuality.low` or `"low"`, `Camera.constants.CaptureQuality.photo` or `"photo"`, `Camera.constants.CaptureQuality["1080p"]` or `"1080p"`, `Camera.constants.CaptureQuality["720p"]` or `"720p"`, `Camera.constants.CaptureQuality["480p"]` or `"480p"`.
This property allows you to specify the quality output of the captured image or video. By default the quality is set to high.
When choosing more-specific quality settings (1080p, 720p, 480p), note that each platform and device supports different valid picture/video sizes, so the actual resolution within each of these quality settings may differ. There should not be much variance (if any) for iOS: 1080p should give 1920x1080, 720p should give 1280x720, and 480p should give 640x480 (note that iOS 480p is therefore NOT the typical 16:9 HD aspect ratio, and the typically-HD camera preview screen may differ greatly in aspect from what you actually record!). For Android, expect more variance: on most Androids, 1080p *should* give 1920x1080 and 720p *should* give 1280x720; however, 480p will at best be 853x480 (16:9 HD aspect ratio), but falls back to 800x480, 720x480, or worse, depending on what is closest-but-less-than 853x480 and available on the actual device. If your application requires knowledge of the precise resolution of the output image/video, consider manually determining the actual resolution after capture has completed (particularly for 480p on Android).
#### `type`
Values: `Camera.constants.Type.front` or `"front"`, `Camera.constants.Type.back` or `"back"` (default)
Use the `type` property to specify which camera to use.
#### `orientation`
Values:
`Camera.constants.Orientation.auto` or `"auto"` (default),
`Camera.constants.Orientation.landscapeLeft` or `"landscapeLeft"`, `Camera.constants.Orientation.landscapeRight` or `"landscapeRight"`, `Camera.constants.Orientation.portrait` or `"portrait"`, `Camera.constants.Orientation.portraitUpsideDown` or `"portraitUpsideDown"`
The `orientation` property allows you to specify the current orientation of the phone to ensure the viewfinder is "the right way up."
#### `Android` `playSoundOnCapture`
Values: `true` (default) or `false`
This property allows you to specify whether a shutter sound is played on capture. It is currently Android-only, pending [a reasonable mute implementation](http://stackoverflow.com/questions/4401232/avfoundation-how-to-turn-off-the-shutter-sound-when-capturestillimageasynchrono) in iOS.
#### `onBarCodeRead`
Will call the specified method when a barcode is detected in the camera's view.
The event contains `data` (the data in the barcode), `type` (the type of barcode detected) and `bounds` (the rectangle which outlines the barcode).
The following barcode types can be recognised:
- `aztec`
- `code128`
- `code39`
- `code39mod43`
- `code93`
- `ean13` (`iOS` converts `upca` barcodes to `ean13` by adding a leading 0)
- `ean8`
- `pdf417`
- `qr`
- `upce`
- `interleaved2of5` (when available)
- `itf14` (when available)
- `datamatrix` (when available)
The barcode type is provided in the event's `type` property.
#### `barCodeTypes`
An array of barcode types to search for. Defaults to all types listed above. No effect if `onBarCodeRead` is undefined.
Example: `<Camera barCodeTypes={[Camera.constants.BarCodeType.qr]} />`
#### `flashMode`
Values:
`Camera.constants.FlashMode.on`,
`Camera.constants.FlashMode.off`,
`Camera.constants.FlashMode.auto`
Use the `flashMode` property to specify the camera flash mode.
#### `torchMode`
Values:
`Camera.constants.TorchMode.on`,
`Camera.constants.TorchMode.off`,
`Camera.constants.TorchMode.auto`
Use the `torchMode` property to specify the camera torch mode.
#### `onFocusChanged: Event { nativeEvent: { touchPoint: { x, y } } }`
iOS: Called when a touch focus gesture has been made.
By default, `onFocusChanged` is not defined and tap-to-focus is disabled.
Android: This callback is not yet implemented. However, Android will
automatically do tap-to-focus if the device supports auto-focus; there is
currently no way to manage this from javascript.
To get autofocus/tap-to-focus working correctly on Android, make sure the proper features are declared in your `AndroidManifest.xml`:
```xml
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
```
#### `iOS` `defaultOnFocusComponent`
Values:
`true` (default)
`false`
If `defaultOnFocusComponent` is set to `false`, the default internal visual feedback for the tap-to-focus gesture is disabled.
#### `iOS` `onZoomChanged: Event { nativeEvent: { velocity, zoomFactor } }`
iOS: Called when a pinch-to-zoom gesture changes the zoom factor.
By default, `onZoomChanged` is not defined and pinch-to-zoom is disabled.
Android: This callback is not yet implemented. However, Android will
automatically handle pinch-to-zoom; there is currently no way to manage this
from javascript.
#### `iOS` `keepAwake`
If set to `true`, the device will not sleep while the camera preview is visible. This mimics the behavior of the default camera app, which keeps the device awake while open.
#### `Android` `clearWindowBackground`
Values:
`true`
`false` (default)
If you encounter performance issues while using a window background drawable (typically defined in a theme to emulate splash-screen behavior), set this to `true` to automatically clear the window background once the camera has started.
#### `Android` `permissionDialogTitle`
Starting with Android M, individual permissions must be granted for certain services (the camera is one of them); you can use this to change the title of the dialog prompt requesting permissions.
#### `Android` `permissionDialogMessage`
Starting with Android M, individual permissions must be granted for certain services (the camera is one of them); you can use this to change the content of the dialog prompt requesting permissions.
#### `notAuthorizedView`
By default a `Camera not authorized` message will be displayed when access to the camera has been denied; if set, the passed React element is displayed instead of the default one.
#### `pendingAuthorizationView`
By default an `<ActivityIndicator>` will be displayed while the component is waiting for the user to grant or deny access to the camera; if set, the passed React element is displayed instead of the default one.
#### `mirrorImage`
If set to `true`, the image returned will be mirrored.
#### `fixOrientation` (_deprecated_)
If set to `true`, the image returned will be rotated the _right way up_. WARNING: it uses a significant amount of memory and may cause your application to crash if the device cannot provide enough RAM to perform the rotation.
(_If you find that you need this option because your images are incorrectly oriented by default, please submit a PR and include the make and model of the device. We believe this functionality is no longer required and would like to remove it._)
## Component instance methods
You can access component methods by adding a `ref` (i.e. `ref="camera"`) prop to your `<Camera>` element; then you can use `this.refs.camera.capture(cb)`, etc. inside your component.
#### `capture([options]): Promise`
Captures data from the camera. What is captured is based on the `captureMode` and `captureTarget` props. `captureMode` tells the camera whether you want a still image or video. `captureTarget` allows you to specify how you want the data to be captured and sent back to you. See `captureTarget` under Properties to see the available values.
Supported options:
- `audio` (See `captureAudio` under Properties)
- `mode` (See `captureMode` under Properties)
- `target` (See `captureTarget` under Properties)
- `metadata` This is metadata to be added to the captured image.
- `location` This is the object returned from `navigator.geolocation.getCurrentPosition()` (React Native's geolocation polyfill). It will add GPS metadata to the image.
- `rotation` This will rotate the image by the number of degrees specified.
- `jpegQuality` (integer between 1 and 100) This property is used to compress the output JPEG file, with 100 meaning no JPEG compression will be applied.
- `totalSeconds` This will limit video length by number of seconds specified. Only works in video capture mode.
The promise will be fulfilled with an object with some of the following properties:
- `data`: Returns a base64-encoded string with the capture data (only returned with the deprecated `Camera.constants.CaptureTarget.memory`)
- `path`: Returns the path of the captured image or video file on disk
- `width`: (not yet implemented for Android video) returns the image or video file's frame width (taking image orientation into account)
- `height`: (not yet implemented for Android video) returns the image or video file's frame height (taking image orientation into account)
- `duration`: (currently iOS video only) video file duration
- `size`: (currently iOS video only) video file size (in bytes)
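For example, a still capture saved to disk with GPS metadata attached might look like this (a minimal sketch; `position` is assumed to come from React Native's geolocation polyfill and `this.camera` is the ref from the usage example above):
```javascript
// Minimal sketch: still capture to disk, with location metadata attached.
navigator.geolocation.getCurrentPosition(position => {
  this.camera.capture({
    mode: Camera.constants.CaptureMode.still,
    target: Camera.constants.CaptureTarget.disk,
    metadata: { location: position },
  })
    .then(data => console.log(data.path))
    .catch(err => console.error(err));
});
```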
#### `iOS` `getFOV(): Promise`
Returns the camera's current field of view.
#### `hasFlash(): Promise`
Returns whether or not the camera has flash capabilities.
#### `stopCapture()`
Ends the current capture session for video captures. Only applies when the current `captureMode` is `video`.
#### `stopPreview()`
Stops the camera preview from running, and natively will make the current capture session pause.
#### `startPreview()`
Starts the camera preview again if previously stopped.
## Component static methods
#### `iOS` `Camera.checkDeviceAuthorizationStatus(): Promise`
Exposes the native API for checking if the device has authorized access to the camera (camera and microphone permissions). Can be used to call before loading the Camera component to ensure proper UX. The promise will be fulfilled with `true` or `false` depending on whether the device is authorized. Note, [as of iOS 10](https://developer.apple.com/library/content/documentation/AudioVideo/Conceptual/PhotoCaptureGuide/#//apple_ref/doc/uid/TP40017511-CH1-DontLinkElementID_3), you will need to add `NSCameraUsageDescription` and `NSMicrophoneUsageDescription` to your XCode project's Info.plist file or you might experience a crash.
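For example (a minimal sketch):
```javascript
// Minimal sketch: check authorization before rendering the <Camera /> component.
Camera.checkDeviceAuthorizationStatus().then(isAuthorized => {
  this.setState({ isAuthorized });
});
```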
#### `iOS` `Camera.checkVideoAuthorizationStatus(): Promise`
The same as `Camera.checkDeviceAuthorizationStatus()` but only checks the camera permission. Note, as of iOS 10, you will need to add `NSCameraUsageDescription` to your XCode project's Info.plist file or you might experience a crash.
#### `iOS` `Camera.checkAudioAuthorizationStatus(): Promise`
The same as `Camera.checkDeviceAuthorizationStatus()` but only checks the microphone permission. Note, as of iOS 10, you will need to add `NSMicrophoneUsageDescription` to your XCode project's Info.plist file or you might experience a crash.
## Subviews
This component supports subviews, so if you wish to use the camera view as a background or if you want to layout buttons/images/etc. inside the camera then you can do that.
## Example
To see more of the `RCTCamera` in action, you can check out the source in [Example](https://github.com/lwansbrough/react-native-camera/tree/master/Example) folder.
## Open Collective
We are just beginning a funding campaign for react-native-camera. Contributions are greatly appreciated. When we gain more than $250 we will begin distributing funds to core maintainers in a fully transparent manner. Feedback for this process is welcomed, we will continue to evolve the strategy as we grow and learn more.
### Backers
Support us with a monthly donation and help us continue our activities. [[Become a backer](https://opencollective.com/react-native-camera#backer)]
<a href="https://opencollective.com/react-native-camera/backer/0/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/0/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/1/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/1/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/2/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/2/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/3/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/3/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/4/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/4/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/5/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/5/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/6/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/6/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/7/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/7/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/8/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/8/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/9/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/9/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/10/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/10/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/11/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/11/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/12/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/12/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/13/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/13/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/14/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/14/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/15/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/15/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/16/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/16/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/17/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/17/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/18/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/18/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/19/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/19/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/20/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/20/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/21/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/21/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/22/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/22/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/23/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/23/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/24/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/24/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/25/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/25/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/26/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/26/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/27/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/27/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/28/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/28/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/29/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/29/avatar.svg"></a>
### Sponsors
Become a sponsor and get your logo on our README on Github with a link to your site. [[Become a sponsor](https://opencollective.com/react-native-camera#sponsor)]
<a href="https://opencollective.com/react-native-camera/sponsor/0/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/0/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/1/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/1/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/2/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/2/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/3/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/3/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/4/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/4/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/5/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/5/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/6/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/6/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/7/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/7/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/8/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/8/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/9/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/9/avatar.svg"></a>
------------
Thanks to Brent Vatne (@brentvatne) for the `react-native-video` module which provided me with a great example of how to set up this module.

364
docs/RNCamera.md Normal file
View File

@ -0,0 +1,364 @@
# RNCamera
## Usage
All you need is to `import` `{ RNCamera }` from the `react-native-camera` module and then use the
`<RNCamera/>` tag.
```javascript
'use strict';
import React, { Component } from 'react';
import {
AppRegistry,
Dimensions,
StyleSheet,
Text,
TouchableOpacity,
View
} from 'react-native';
import { RNCamera } from 'react-native-camera';
class BadInstagramCloneApp extends Component {
render() {
return (
<View style={styles.container}>
<RNCamera
ref={ref => {
this.camera = ref;
}}
style = {styles.preview}
type={RNCamera.Constants.Type.back}
flashMode={RNCamera.Constants.FlashMode.on}
permissionDialogTitle={'Permission to use camera'}
permissionDialogMessage={'We need your permission to use your camera phone'}
/>
<View style={{flex: 0, flexDirection: 'row', justifyContent: 'center',}}>
<TouchableOpacity
onPress={this.takePicture.bind(this)}
style = {styles.capture}
>
<Text style={{fontSize: 14}}> SNAP </Text>
</TouchableOpacity>
</View>
</View>
);
}
  takePicture = async function() {
    if (this.camera) {
      const options = { quality: 0.5, base64: true };
      const data = await this.camera.takePictureAsync(options);
      console.log(data.uri);
    }
  };
}
const styles = StyleSheet.create({
container: {
flex: 1,
flexDirection: 'column',
backgroundColor: 'black'
},
preview: {
flex: 1,
justifyContent: 'flex-end',
alignItems: 'center'
},
capture: {
flex: 0,
backgroundColor: '#fff',
borderRadius: 5,
padding: 15,
paddingHorizontal: 20,
alignSelf: 'center',
margin: 20
}
});
AppRegistry.registerComponent('BadInstagramCloneApp', () => BadInstagramCloneApp);
```
## Properties
#### `autoFocus`
Values: `RNCamera.Constants.AutoFocus.on` (default) or `RNCamera.Constants.AutoFocus.off`
Most cameras have an Auto Focus feature. It adjusts your camera's lens position automatically depending on the pixels seen by your camera.
Use the `autoFocus` property to specify the auto focus setting of your camera. `RNCamera.Constants.AutoFocus.on` turns it ON, `RNCamera.Constants.AutoFocus.off` turns it OFF.
#### `iOS` `captureAudio`
Values: `true` (Boolean), `false` (default)
Specifies whether or not audio should be captured with the video. If `true`, the app will request microphone permission along with the video permission.
#### `flashMode`
Values: `RNCamera.Constants.FlashMode.off` (default), `RNCamera.Constants.FlashMode.on`, `RNCamera.Constants.FlashMode.auto` or `RNCamera.Constants.FlashMode.torch`.
Specifies the flash mode of your camera.
`RNCamera.Constants.FlashMode.off` turns it off.
`RNCamera.Constants.FlashMode.on` means camera will use flash in all photos taken.
`RNCamera.Constants.FlashMode.auto` leaves your phone to decide when to use flash when taking photos, based on the lighting conditions that the camera observes.
`RNCamera.Constants.FlashMode.torch` turns on torch mode, meaning the flash light will be turned on all the time (even before taking a photo), just like a flashlight.
#### `focusDepth`
Value: float from `0` to `1.0`
Manually set camera focus. Only works with `autoFocus` off. The value 0 is minimum focus depth, 1 is maximum focus depth. For a medium focus depth, for example, you could use 0.5.
#### `Android` `ratio`
A string representing the camera ratio in the format 'height:width'. Default is `"4:3"`.
Use `getSupportedRatiosAsync` method to get ratio strings supported by your camera on Android.
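For example (a minimal sketch; `this.camera` is the component ref from the usage example above):
```javascript
// Minimal sketch (Android only): list the preview ratios the device supports.
const ratios = await this.camera.getSupportedRatiosAsync();
console.log(ratios); // e.g. ["4:3", "16:9"]
```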
#### `type`
Values: `RNCamera.Constants.Type.front` or `RNCamera.Constants.Type.back` (default)
Use the `type` property to specify which camera to use.
#### `whiteBalance`
Values: `RNCamera.Constants.WhiteBalance.sunny`, `RNCamera.Constants.WhiteBalance.cloudy`, `RNCamera.Constants.WhiteBalance.shadow`, `RNCamera.Constants.WhiteBalance.incandescent`, `RNCamera.Constants.WhiteBalance.fluorescent` or `RNCamera.Constants.WhiteBalance.auto` (default)
A camera's white balance setting allows you to control the color temperature in your photos by cooling down or warming up the colors.
The idea is that you select the appropriate white balance setting for the type of light you're shooting in, and your camera then automatically adjusts the colors to eliminate any warm or cool color casts from your light source.
Use the `whiteBalance` property to specify which white balance setting the camera should use.
#### `zoom`
Value: float from `0` to `1.0`
Specifies the zoom of your camera. The value 0 is no zoom, 1 is maximum zoom. For a medium zoom, for example, you could pass `0.5`.
#### `Android` `permissionDialogTitle`
Starting with Android M, individual permissions must be granted for certain services (the camera is one of them); you can use this to change the title of the dialog prompt requesting permissions.
#### `Android` `permissionDialogMessage`
Starting with Android M, individual permissions must be granted for certain services (the camera is one of them); you can use this to change the content of the dialog prompt requesting permissions.
#### `notAuthorizedView`
By default a `Camera not authorized` message will be displayed when access to the camera has been denied; if set, the passed React element is displayed instead of the default one.
#### `pendingAuthorizationView`
By default an `<ActivityIndicator>` will be displayed while the component is waiting for the user to grant or deny access to the camera; if set, the passed React element is displayed instead of the default one.
### Native Event callbacks props
#### `onCameraReady`
Function to be called when the native code emits the `onCameraReady` event, i.e. when the camera is ready.
#### `onMountError`
Function to be called when the native code emits the `onMountError` event, i.e. when there is a problem mounting the camera.
### Bar Code Related props
#### `onBarCodeRead`
Will call the specified method when a barcode is detected in the camera's view.
Event contains `data` (the data in the barcode) and `type` (the type of the barcode detected).
The following barcode types can be recognised:
- `aztec`
- `code128`
- `code39`
- `code39mod43`
- `code93`
- `ean13` (`iOS` converts `upca` barcodes to `ean13` by adding a leading 0)
- `ean8`
- `pdf417`
- `qr`
- `upce`
- `interleaved2of5` (when available)
- `itf14` (when available)
- `datamatrix` (when available)
The barcode type is provided in the event's `type` property.
#### `barCodeTypes`
An array of barcode types to search for. Defaults to all types listed above. No effect if `onBarCodeRead` is undefined.
Example: `<RNCamera barCodeTypes={[RNCamera.Constants.BarCodeType.qr]} />`
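A sketch of a handler (the `console.warn` call is purely illustrative):

```jsx
<RNCamera
  style={styles.preview}
  barCodeTypes={[RNCamera.Constants.BarCodeType.qr]}
  onBarCodeRead={({ type, data }) => {
    // `type` is the detected barcode type, `data` its decoded contents
    console.warn(`Read ${type} barcode: ${data}`);
  }}
/>
```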
### Face Detection Related props
RNCamera uses the Google Mobile Vision frameworks for Face Detection, you can read more info about it [here](https://developers.google.com/android/reference/com/google/android/gms/vision/face/FaceDetector).
#### `onFacesDetected`
Method to be called when a face is detected. Receives a Faces Detected event object. The interesting value of this object is `faces`, an array of objects with the [Face](https://developers.google.com/android/reference/com/google/android/gms/vision/face/Face) properties.
#### `onFaceDetectionError`
Method to be called if there was a Face Detection Error. Receives an object with the `isOperational` property set to `false` if the Face Detector is NOT operational and `true` if it is.
#### `faceDetectionMode`
Values: `RNCamera.Constants.FaceDetection.Mode.fast` (default) or `RNCamera.Constants.FaceDetection.Mode.accurate`
Specifies the face detection mode of the Face Detection API.
Use `RNCamera.Constants.FaceDetection.Mode.accurate` if you want slower but more accurate results.
#### `faceDetectionLandmarks`
Values: `RNCamera.Constants.FaceDetection.Landmarks.all` or `RNCamera.Constants.FaceDetection.Landmarks.none` (default)
A landmark is a point of interest within a face. The left eye, right eye, and nose base are all examples of landmarks. The Face API provides the ability to find landmarks on a detected face.
#### `faceDetectionClassifications`
Values: `RNCamera.Constants.FaceDetection.Classifications.all` or `RNCamera.Constants.FaceDetection.Classifications.none` (default)
Classification is determining whether a certain facial characteristic is present. For example, a face can be classified with regards to whether its eyes are open or closed. Another example is whether the face is smiling or not.
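Putting the face detection props together, a minimal sketch (the handler bodies are illustrative only):

```jsx
<RNCamera
  style={styles.preview}
  faceDetectionMode={RNCamera.Constants.FaceDetection.Mode.fast}
  faceDetectionLandmarks={RNCamera.Constants.FaceDetection.Landmarks.all}
  onFacesDetected={({ faces }) => {
    // each face carries `bounds` plus landmark positions when Landmarks.all is set
    faces.forEach(face => console.log(face.bounds));
  }}
  onFaceDetectionError={({ isOperational }) => {
    if (!isOperational) console.warn('Face detector is not operational');
  }}
/>
```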
## Component instance methods
#### `takePictureAsync([options]): Promise`
Takes a picture, saves it in your app's cache directory and returns a promise.
Supported options:
- `quality` (float between 0 and 1.0). This property is used to compress the output JPEG file, with `1` meaning no JPEG compression is applied. If no value is specified, `quality: 1` is used.
- `base64` (boolean true or false). Use this with `true` if you want a base64 representation of the picture taken in the return data of your promise. If no value is specified, `base64: false` is used.
- `exif` (boolean true or false). Use this with `true` if you want an EXIF data map of the picture taken in the return data of your promise. If no value is specified, `exif: false` is used.
The promise will be fulfilled with an object with some of the following properties:
- `width`: returns the image's width (taking image orientation into account)
- `height`: returns the image's height (taking image orientation into account)
- `uri`: returns the path to the image saved on your app's cache directory.
- `base64`: returns the base64 representation of the image if required.
- `exif`: returns an exif map of the image if required.
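For example, a sketch assuming `this.camera` holds a ref to the `RNCamera` instance (as in the migration example below):

```jsx
takePicture = async () => {
  const options = { quality: 0.5, base64: true };
  const data = await this.camera.takePictureAsync(options);
  // `uri` points to the photo in the app's cache directory
  console.log(data.uri, data.width, data.height);
};
```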
#### `recordAsync([options]): Promise`
Records a video, saves it in your app's cache directory and returns a promise that is fulfilled when `stopRecording` is called or when the specified `maxDuration` or `maxFileSize` is reached.
Supported options:
- `quality`. This option specifies the quality of the video to be taken. The possible values are:
- `RNCamera.Constants.VideoQuality.2160p`.
- `ios` Specifies capture settings suitable for 2160p (also called UHD or 4K) quality (3840x2160 pixel) video output.
- `android` Quality level corresponding to the 2160p (3840x2160) resolution. (Android Lollipop and above only!).
- `RNCamera.Constants.VideoQuality.1080p`.
- `ios` Specifies capture settings suitable for 1080p quality (1920x1080 pixel) video output.
- `android` Quality level corresponding to the 1080p (1920 x 1080) resolution.
- `RNCamera.Constants.VideoQuality.720p`.
- `ios` Specifies capture settings suitable for 720p quality (1280x720 pixel) video output.
- `android` Quality level corresponding to the 720p (1280 x 720) resolution.
- `RNCamera.Constants.VideoQuality.480p`.
- `ios` Specifies capture settings suitable for VGA quality (640x480 pixel) video output.
- `android` Quality level corresponding to the 480p (720 x 480) resolution.
- `RNCamera.Constants.VideoQuality.4:3`.
- `ios` Specifies capture settings suitable for VGA quality (640x480 pixel) video output. (Same as RNCamera.Constants.VideoQuality.480p).
- `android` Quality level corresponding to the 480p (720 x 480) resolution but with video frame width set to 640.
If nothing is passed, the device's highest camera quality is used by default.
- `maxDuration` (float greater than 0). Specifies the maximum duration of the video to be recorded in seconds. If nothing is specified, no time limit will be used.
- `maxFileSize` (int greater than 0). Specifies the maximum file size, in bytes, of the video to be recorded. For 1 MB, for example, use `1*1024*1024`. If nothing is specified, no size limit will be used.
- `mute` (any value). If this flag is present in the options with any value, the recorded video will be muted. If nothing is specified, the video will NOT be muted.
The promise will be fulfilled with an object with some of the following properties:
- `uri`: returns the path to the video saved on your app's cache directory.
#### `stopRecording: void`
Should be called after `recordAsync()` to make the promise be fulfilled and obtain the video URI.
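A sketch of the record/stop flow, again assuming `this.camera` is a ref to the `RNCamera` instance:

```jsx
startRecording = async () => {
  const options = { quality: RNCamera.Constants.VideoQuality['480p'], maxDuration: 30 };
  // the promise fulfils when stopRecording() is called or maxDuration is reached
  const { uri } = await this.camera.recordAsync(options);
  console.log('Video saved at', uri);
};

stopRecording = () => {
  this.camera.stopRecording();
};
```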
#### `Android` `getSupportedRatiosAsync(): Promise`
Android only. Returns a promise that is fulfilled with an array of strings representing all camera aspect ratios supported by the device.
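For example, a sketch (Android only; `this.camera` is assumed to be an `RNCamera` ref):

```jsx
logSupportedRatios = async () => {
  const ratios = await this.camera.getSupportedRatiosAsync();
  console.log(ratios); // e.g. ['4:3', '16:9'], depending on the device
};
```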
## Subviews
This component supports subviews, so if you wish to use the camera view as a background, or if you want to lay out buttons/images/etc. inside the camera view, you can do that.
## Example
To see more of the `RNCamera` in action, with Face Detection, you can check out the source in [RNCamera Example](https://github.com/react-native-community/rncamera-example) repository.
## Open Collective
We are just beginning a funding campaign for react-native-camera. Contributions are greatly appreciated. When we gain more than $250, we will begin distributing funds to core maintainers in a fully transparent manner. Feedback on this process is welcome; we will continue to evolve the strategy as we grow and learn more.
### Backers
Support us with a monthly donation and help us continue our activities. [[Become a backer](https://opencollective.com/react-native-camera#backer)]
<a href="https://opencollective.com/react-native-camera/backer/0/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/0/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/1/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/1/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/2/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/2/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/3/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/3/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/4/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/4/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/5/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/5/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/6/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/6/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/7/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/7/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/8/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/8/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/9/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/9/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/10/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/10/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/11/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/11/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/12/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/12/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/13/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/13/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/14/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/14/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/15/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/15/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/16/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/16/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/17/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/17/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/18/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/18/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/19/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/19/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/20/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/20/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/21/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/21/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/22/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/22/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/23/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/23/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/24/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/24/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/25/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/25/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/26/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/26/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/27/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/27/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/28/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/28/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/backer/29/website" target="_blank"><img src="https://opencollective.com/react-native-camera/backer/29/avatar.svg"></a>
### Sponsors
Become a sponsor and get your logo on our README on Github with a link to your site. [[Become a sponsor](https://opencollective.com/react-native-camera#sponsor)]
<a href="https://opencollective.com/react-native-camera/sponsor/0/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/0/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/1/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/1/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/2/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/2/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/3/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/3/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/4/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/4/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/5/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/5/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/6/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/6/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/7/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/7/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/8/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/8/avatar.svg"></a>
<a href="https://opencollective.com/react-native-camera/sponsor/9/website" target="_blank"><img src="https://opencollective.com/react-native-camera/sponsor/9/avatar.svg"></a>
------------
Thanks to Brent Vatne (@brentvatne) for the `react-native-video` module which provided me with a great example of how to set up this module.

150
docs/migration.md Normal file
View File

@ -0,0 +1,150 @@
# Migrating from RCTCamera to RNCamera
## Project Integration
Please follow the [RNCamera doc](https://github.com/react-native-community/react-native-camera/blob/master/docs/RNCamera.md) installation guide to install the face detection frameworks on both platforms.
### iOS
Open your app's Xcode project. Expand the Libraries folder in the project navigator, then right-click `RCTCamera.xcodeproj` and delete it.
On your project's target, under `Build Phases`, select `libRCTCamera.a` and delete it (press the `-` button below).
You can follow the installation steps for RNCamera in the readme to link the new RNCamera project to your app's Xcode project.
You can do this via the `react-native link` command or by following the manual steps.
Before building and running again, do a complete clean of your project.
### Android
1. In the `MainApplication` of your Android project, change the RCTCameraPackage import line to:
```java
import org.reactnative.camera.RNCameraPackage;
```
2. Inside the `getPackages()` method, change `new RCTCameraPackage()` to `new RNCameraPackage()`.
3. On `android/app/build.gradle`, change the line: `compile (project(':react-native-camera'))` to:
```gradle
compile (project(':react-native-camera')) {
exclude group: "com.google.android.gms"
}
compile ("com.google.android.gms:play-services-vision:10.2.0") {
force = true;
}
```
4. Add jitpack to android/build.gradle
```gradle
allprojects {
repositories {
maven { url "https://jitpack.io" }
}
}
```
## Usage differences
### imports
Instead of importing `Camera`, you should now import `{ RNCamera }` from `react-native-camera`.
### No `captureMode` prop
On RCTCamera, you would set the camera's `captureMode` to `still` or `video`, and you could only record or take a picture depending on the `captureMode` of your `Camera`.
On RNCamera you do not need to specify a `captureMode`. The RNCamera, in any state, can record or take a picture by calling the appropriate method.
### `capture` to `takePictureAsync` or `recordAsync`
Let's say you have a component with a RCTCamera taking a photo:
```jsx
import Camera from 'react-native-camera';
class TakePicture extends Component {
takePicture = async () => {
try {
const data = await this.camera.capture();
console.log('Path to image: ' + data.path);
} catch (err) {
// console.log('err: ', err);
}
};
render() {
return (
<View style={styles.container}>
<Camera
ref={cam => {
this.camera = cam;
}}
style={styles.preview}
aspect={Camera.constants.Aspect.fill}
captureAudio={false}
>
<View style={styles.captureContainer}>
<TouchableOpacity style={styles.capture} onPress={this.takePicture}>
<Icon style={styles.iconCamera}>camera</Icon>
<Text>Take Photo</Text>
</TouchableOpacity>
</View>
</Camera>
<View style={styles.space} />
</View>
);
}
}
```
You should change this to:
```jsx
import { RNCamera } from 'react-native-camera';
class TakePicture extends Component {
takePicture = async () => {
try {
const data = await this.camera.takePictureAsync();
console.log('Path to image: ' + data.uri);
} catch (err) {
// console.log('err: ', err);
}
};
render() {
return (
<View style={styles.container}>
<RNCamera
ref={cam => {
this.camera = cam;
}}
style={styles.preview}
>
<View style={styles.captureContainer}>
<TouchableOpacity style={styles.capture} onPress={this.takePicture}>
<Icon style={styles.iconCamera}>camera</Icon>
<Text>Take Photo</Text>
</TouchableOpacity>
</View>
</RNCamera>
<View style={styles.space} />
</View>
);
}
}
```
The same logic applies to change `capture` to `recordAsync`.
### `flashMode` and `torchMode`
In RCTCamera, there were separate `flashMode` and `torchMode` props. In RNCamera, these are combined into the `flashMode` prop.
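For example, where RCTCamera used `torchMode`, a hedged equivalent with RNCamera would be:

```jsx
// RCTCamera: <Camera torchMode={Camera.constants.TorchMode.on} ... />
// RNCamera:
<RNCamera flashMode={RNCamera.Constants.FlashMode.torch} style={styles.preview} />
```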
### Other differences
Take a look into the [RCTCamera doc](https://github.com/react-native-community/react-native-camera/blob/master/docs/RCTCamera.md) and the [RNCamera doc](https://github.com/react-native-community/react-native-camera/blob/master/docs/RNCamera.md) to see more differences.

View File

@ -0,0 +1,33 @@
//
// RNFaceDetectorManager.h
// RCTCamera
//
// Created by Stanisław Chmiela on 22.11.2017.
// Copyright © 2017 650 Industries. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <GoogleMobileVision/GoogleMobileVision.h>
#import <GoogleMVDataOutput/GoogleMVDataOutput.h>
@protocol RNFaceDetectorDelegate
- (void)onFacesDetected:(NSArray<NSDictionary *> *)faces;
@end
@interface RNFaceDetectorManager : NSObject
- (NSDictionary *)constantsToExport;
+ (NSDictionary *)constants;
- (instancetype)initWithSessionQueue:(dispatch_queue_t)sessionQueue delegate:(id <RNFaceDetectorDelegate>)delegate;
- (void)setIsEnabled:(id)json;
- (void)setLandmarksDetected:(id)json;
- (void)setClassificationsDetected:(id)json;
- (void)setMode:(id)json;
- (void)maybeStartFaceDetectionOnSession:(AVCaptureSession *)session withPreviewLayer:(AVCaptureVideoPreviewLayer *)previewLayer;
- (void)stopFaceDetection;
@end

View File

@ -0,0 +1,260 @@
//
// RNFaceDetectorManager.m
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import <React/RCTConvert.h>
#import "RNCamera.h"
#import "RNFaceEncoder.h"
#import "RNFaceDetectorUtils.h"
#import "RNFaceDetectorModule.h"
#import "RNFaceDetectorManager.h"
@interface RNFaceDetectorManager() <GMVDataOutputDelegate>
@property (assign, nonatomic) long previousFacesCount;
@property (nonatomic, strong) GMVDataOutput *dataOutput;
@property (nonatomic, weak) AVCaptureSession *session;
@property (nonatomic, weak) dispatch_queue_t sessionQueue;
@property (nonatomic, assign, getter=isConnected) BOOL connected;
@property (nonatomic, weak) id <RNFaceDetectorDelegate> delegate;
@property (nonatomic, weak) AVCaptureVideoPreviewLayer *previewLayer;
@property (nonatomic, assign, getter=isDetectingFaces) BOOL faceDetecting;
@property (nonatomic, strong) NSMutableDictionary<NSString *, id> *faceDetectorOptions;
@end
@implementation RNFaceDetectorManager
static NSDictionary *defaultFaceDetectorOptions = nil;
- (NSDictionary *)constantsToExport
{
return [[self class] constants];
}
+ (NSDictionary *)constants
{
return [RNFaceDetectorUtils constantsToExport];
}
- (instancetype)initWithSessionQueue:(dispatch_queue_t)sessionQueue delegate:(id <RNFaceDetectorDelegate>)delegate
{
if (self = [super init]) {
_delegate = delegate;
_previousFacesCount = -1;
_sessionQueue = sessionQueue;
_faceDetectorOptions = [[NSMutableDictionary alloc] initWithDictionary:[[self class] _getDefaultFaceDetectorOptions]];
}
return self;
}
# pragma mark Properties setters
- (void)setSession:(AVCaptureSession *)session
{
_session = session;
}
# pragma mark - JS properties setters
- (void)setIsEnabled:(id)json
{
BOOL newFaceDetecting = [RCTConvert BOOL:json];
if ([self isDetectingFaces] != newFaceDetecting) {
_faceDetecting = newFaceDetecting;
[self _runBlockIfQueueIsPresent:^{
if ([self isDetectingFaces]) {
[self tryEnablingFaceDetection];
} else {
[self stopFaceDetection];
}
}];
}
}
- (void)setLandmarksDetected:(id)json
{
[self _updateOptionSettingForKey:GMVDetectorFaceLandmarkType withJSONValue:json];
}
- (void)setClassificationsDetected:(id)json
{
[self _updateOptionSettingForKey:GMVDetectorFaceClassificationType withJSONValue:json];
}
- (void)setMode:(id)json
{
[self _updateOptionSettingForKey:GMVDetectorFaceMode withJSONValue:json];
}
# pragma mark - Public API
- (void)maybeStartFaceDetectionOnSession:(AVCaptureSession *)session withPreviewLayer:(AVCaptureVideoPreviewLayer *)previewLayer
{
_session = session;
_previewLayer = previewLayer;
[self tryEnablingFaceDetection];
}
- (void)tryEnablingFaceDetection
{
if (!_session) {
return;
}
[_session beginConfiguration];
if ([self isDetectingFaces]) {
@try {
GMVDetector *faceDetector = [GMVDetector detectorOfType:GMVDetectorTypeFace options:_faceDetectorOptions];
GMVDataOutput *dataOutput = [[GMVMultiDataOutput alloc] initWithDetector:faceDetector];
[dataOutput setDataDelegate:self];
if ([_session canAddOutput:dataOutput]) {
[_session addOutput:dataOutput];
_dataOutput = dataOutput;
_connected = true;
}
_previousFacesCount = -1;
[self _notifyOfFaces:nil];
} @catch (NSException *exception) {
RCTLogWarn(@"%@", [exception description]);
}
}
[_session commitConfiguration];
}
- (void)stopFaceDetection
{
if (!_session) {
return;
}
[_session beginConfiguration];
if ([_session.outputs containsObject:_dataOutput]) {
[_session removeOutput:_dataOutput];
[_dataOutput cleanup];
_dataOutput = nil;
_connected = false;
}
[_session commitConfiguration];
if ([self isDetectingFaces]) {
_previousFacesCount = -1;
[self _notifyOfFaces:nil];
}
}
# pragma mark Private API
- (void)_resetFaceDetector
{
[self stopFaceDetection];
[self tryEnablingFaceDetection];
}
- (void)_notifyOfFaces:(NSArray<NSDictionary *> *)faces
{
NSArray<NSDictionary *> *reportableFaces = faces == nil ? @[] : faces;
// Send event when there are faces that have been detected ([faces count] > 0)
// or if the listener may think that there are still faces in the video (_prevCount > 0)
// or if we really want the event to be sent, eg. to reset listener info (_prevCount == -1).
if ([reportableFaces count] > 0 || _previousFacesCount != 0) {
if (_delegate) {
[_delegate onFacesDetected:reportableFaces];
}
// Maybe if the delegate is not present anymore we should disable encoding;
// however, this should never happen.
_previousFacesCount = [reportableFaces count];
}
}
# pragma mark - Utilities
- (long)_getLongOptionValueForKey:(NSString *)key
{
return [(NSNumber *)[_faceDetectorOptions valueForKey:key] longValue];
}
- (void)_updateOptionSettingForKey:(NSString *)key withJSONValue:(id)json
{
long requestedValue = [RCTConvert NSInteger:json];
long currentValue = [self _getLongOptionValueForKey:key];
if (requestedValue != currentValue) {
[_faceDetectorOptions setValue:@(requestedValue) forKey:key];
[self _runBlockIfQueueIsPresent:^{
[self _resetFaceDetector];
}];
}
}
- (void)_runBlockIfQueueIsPresent:(void (^)(void))block
{
if (_sessionQueue) {
dispatch_async(_sessionQueue, block);
}
}
#pragma mark - GMVDataOutputDelegate
- (void)dataOutput:(GMVDataOutput *)dataOutput didFinishedDetection:(NSArray<__kindof GMVFeature *> *)results
{
// Calling dataOutput:didFinishedDetection: with a dataOutput whose videoSettings has no information about
// width or height started to happen after a refactor: moving face detection logic from EXCameraManager to EXFaceDetectorManager.
// I suppose no information is provided because the data output is already disconnected from the input and has no
// information about the source. Let's reset the information then.
if (!_connected) {
[self _notifyOfFaces:nil];
return;
}
AVCaptureVideoOrientation interfaceVideoOrientation = _previewLayer.connection.videoOrientation;
CGAffineTransform transform = [RNFaceDetectorUtils transformFromDeviceOutput:dataOutput toInterfaceVideoOrientation:interfaceVideoOrientation];
RNFaceEncoder *faceEncoder = [[RNFaceEncoder alloc] initWithTransform:transform];
NSMutableArray<NSDictionary *> *encodedFaces = [NSMutableArray arrayWithCapacity:[results count]];
[results enumerateObjectsUsingBlock:^(GMVFeature * _Nonnull feature, NSUInteger _idx, BOOL * _Nonnull _stop) {
if([feature isKindOfClass:[GMVFaceFeature class]]) {
GMVFaceFeature *face = (GMVFaceFeature *)feature;
[encodedFaces addObject:[faceEncoder encode:face]];
}
}];
[self _notifyOfFaces:encodedFaces];
}
# pragma mark - Default options
+ (NSDictionary *)_getDefaultFaceDetectorOptions
{
if (defaultFaceDetectorOptions == nil) {
[self _initDefaultFaceDetectorOptions];
}
return defaultFaceDetectorOptions;
}
+ (void)_initDefaultFaceDetectorOptions
{
defaultFaceDetectorOptions = @{
GMVDetectorFaceTrackingEnabled : @(YES),
GMVDetectorFaceMode : @(GMVDetectorFaceFastMode),
GMVDetectorFaceLandmarkType : @(GMVDetectorFaceLandmarkNone),
GMVDetectorFaceClassificationType : @(GMVDetectorFaceClassificationNone),
GMVDetectorFaceMinSize : @(0.15)
};
}
@end

View File

@ -0,0 +1,12 @@
//
// RNFaceDetectorModule.h
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import <React/RCTBridgeModule.h>
#import <GoogleMobileVision/GoogleMobileVision.h>
@interface RNFaceDetectorModule : NSObject <RCTBridgeModule>
@end

View File

@ -0,0 +1,195 @@
//
// RNFaceDetectorModule.m
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import "RNFaceDetectorModule.h"
#import "RNFaceEncoder.h"
#import "RNFileSystem.h"
#import "RNFaceDetectorUtils.h"
static const NSString *kModeOptionName = @"mode";
static const NSString *kDetectLandmarksOptionName = @"detectLandmarks";
static const NSString *kRunClassificationsOptionName = @"runClassifications";
@implementation RNFaceDetectorModule
static NSFileManager *fileManager = nil;
static NSDictionary *defaultDetectorOptions = nil;
- (instancetype)init
{
self = [super init];
if (self) {
fileManager = [NSFileManager defaultManager];
}
return self;
}
RCT_EXPORT_MODULE(RNFaceDetector);
@synthesize bridge = _bridge;
- (void)setBridge:(RCTBridge *)bridge
{
_bridge = bridge;
}
+ (BOOL)requiresMainQueueSetup
{
return NO;
}
- (NSDictionary *)constantsToExport
{
return [RNFaceDetectorUtils constantsToExport];
}
RCT_EXPORT_METHOD(detectFaces:(nonnull NSDictionary *)options
resolver:(RCTPromiseResolveBlock)resolve
rejecter:(RCTPromiseRejectBlock)reject)
{
NSString *uri = options[@"uri"];
if (uri == nil) {
reject(@"E_FACE_DETECTION_FAILED", @"You must define a URI.", nil);
return;
}
NSURL *url = [NSURL URLWithString:uri];
NSString *path = [url.path stringByStandardizingPath];
// if (!([self.bridge.scopedModules.fileSystem permissionsForURI:url] & EXFileSystemPermissionRead)) {
// reject(@"E_FILESYSTEM_PERMISSIONS", [NSString stringWithFormat:@"File '%@' isn't readable.", uri], nil);
// return;
// }
@try {
GMVDetector *detector = [[self class] detectorForOptions:options];
if (![fileManager fileExistsAtPath:path]) {
reject(@"E_FACE_DETECTION_FAILED", [NSString stringWithFormat:@"The file does not exist. Given path: `%@`.", path], nil);
return;
}
UIImage *image = [[UIImage alloc] initWithContentsOfFile:path];
NSDictionary *detectionOptions = [[self class] detectionOptionsForImage:image];
NSArray<GMVFaceFeature *> *faces = [detector featuresInImage:image options:detectionOptions];
RNFaceEncoder *faceEncoder = [[RNFaceEncoder alloc] init];
NSMutableArray<NSDictionary *> *encodedFaces = [NSMutableArray arrayWithCapacity:[faces count]];
[faces enumerateObjectsUsingBlock:^(GMVFaceFeature * _Nonnull face, NSUInteger _idx, BOOL * _Nonnull _stop) {
[encodedFaces addObject:[faceEncoder encode:face]];
}];
resolve(@{
@"faces" : encodedFaces,
@"image" : @{
@"uri" : options[@"uri"],
@"width" : @(image.size.width),
@"height" : @(image.size.height),
@"orientation" : @([RNFaceDetectorModule exifOrientationFor:image.imageOrientation])
}
});
} @catch (NSException *exception) {
reject(@"E_FACE_DETECTION_FAILED", [exception description], nil);
}
}
+ (GMVDetector *)detectorForOptions:(NSDictionary *)options
{
NSMutableDictionary *parsedOptions = [[NSMutableDictionary alloc] initWithDictionary:[self getDefaultDetectorOptions]];
if (options[kDetectLandmarksOptionName]) {
[parsedOptions setObject:options[kDetectLandmarksOptionName] forKey:GMVDetectorFaceLandmarkType];
}
if (options[kModeOptionName]) {
[parsedOptions setObject:options[kModeOptionName] forKey:GMVDetectorFaceMode];
}
if (options[kRunClassificationsOptionName]) {
[parsedOptions setObject:options[kRunClassificationsOptionName] forKey:GMVDetectorFaceClassificationType];
}
return [GMVDetector detectorOfType:GMVDetectorTypeFace options:parsedOptions];
}
# pragma mark: - Detector default options getter and initializer
+ (NSDictionary *)getDefaultDetectorOptions
{
if (defaultDetectorOptions == nil) {
[self initDefaultDetectorOptions];
}
return defaultDetectorOptions;
}
+ (void)initDefaultDetectorOptions
{
defaultDetectorOptions = @{
GMVDetectorFaceMode : @(GMVDetectorFaceAccurateMode),
GMVDetectorFaceLandmarkType : @(GMVDetectorFaceLandmarkAll),
GMVDetectorFaceClassificationType : @(GMVDetectorFaceClassificationAll)
};
}
# pragma mark: - Utility methods
+ (NSDictionary *)detectionOptionsForImage:(UIImage *)image
{
return @{
GMVDetectorImageOrientation : @([[self class] gmvImageOrientationFor:image.imageOrientation]),
};
}
// As the documentation (http://cocoadocs.org/docsets/GoogleMobileVision/1.0.2/Constants/GMVImageOrientation.html) suggests
// the value of GMVImageOrientation is the same as the value defined by EXIF specifications, so we can adapt
// https://gist.github.com/steipete/4666527 to our needs.
+ (GMVImageOrientation)gmvImageOrientationFor:(UIImageOrientation)orientation
{
switch (orientation) {
case UIImageOrientationUp:
return GMVImageOrientationTopLeft;
case UIImageOrientationDown:
return GMVImageOrientationBottomRight;
case UIImageOrientationLeft:
return GMVImageOrientationLeftBottom;
case UIImageOrientationRight:
return GMVImageOrientationRightTop;
case UIImageOrientationUpMirrored:
return GMVImageOrientationTopRight;
case UIImageOrientationDownMirrored:
return GMVImageOrientationBottomLeft;
case UIImageOrientationLeftMirrored:
return GMVImageOrientationLeftTop;
case UIImageOrientationRightMirrored:
return GMVImageOrientationRightBottom;
}
}
// https://gist.github.com/steipete/4666527
+ (int)exifOrientationFor:(UIImageOrientation)orientation
{
switch (orientation) {
case UIImageOrientationUp:
return 1;
case UIImageOrientationDown:
return 3;
case UIImageOrientationLeft:
return 8;
case UIImageOrientationRight:
return 6;
case UIImageOrientationUpMirrored:
return 2;
case UIImageOrientationDownMirrored:
return 4;
case UIImageOrientationLeftMirrored:
return 5;
case UIImageOrientationRightMirrored:
return 7;
}
}
@end

View File

@ -0,0 +1,17 @@
//
// RNFaceDetectorPointTransformCalculator.h
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
@interface RNFaceDetectorPointTransformCalculator : NSObject
- (instancetype)initToTransformFromOrientation:(AVCaptureVideoOrientation)orientation toOrientation:(AVCaptureVideoOrientation)toOrientation forVideoWidth:(CGFloat)videoWidth andVideoHeight:(CGFloat)videoHeight;
- (CGAffineTransform)transform;
@end

View File

@ -0,0 +1,277 @@
//
// RNFaceDetectorPointTransformCalculator.m
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import "RNFaceDetectorPointTransformCalculator.h"
#define cDefaultFloatComparisonEpsilon 0.0001
#define cModEqualFloatsWithEpsilon(dividend, divisor, modulo, epsilon) \
fabs( fmod(dividend, divisor) - modulo ) < epsilon
#define cModEqualFloats(dividend, divisor, modulo) \
cModEqualFloatsWithEpsilon(dividend, divisor, modulo, cDefaultFloatComparisonEpsilon)
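// cModEqualFloats(a, b, m) is true iff (a mod b) differs from m by less than the epsilon;
// used in -translation below to detect rotations of +/-90 degrees.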
/*
* The purpose of this class is to calculate the transform used to translate
* face detected by Google Mobile Vision to proper view coordinates.
*
* When an Expo app locks interface orientation in `app.json` or with `ScreenOrientation.allow`,
* interface gets locked, but device orientation still can change. It looks like Google Mobile Vision
* listens to device orientation changes and transforms coordinates of faces as if the device orientation
* always equals interface orientation (which in Expo is not the case).
*
* Let's see the behavior on a specific example. Imagine an app with screen orientation locked to portrait.
*
* ```
* +---+
* |^^ | // by ^^ we shall denote a happy face, ^^
* | |
* | |
* +---+
* - // by - we shall denote the bottom of the interface.
* ```
*
* When the device is being held like this face is properly reported in (0, 0).
* However, when we rotate the device to landscape, the situation looks like this:
*
* ```
* +---------------+
* |^^ x| // by xx we shall denote where the face should be according to the GMV detector.
* || x| // note that interface is still portrait-oriented
* | |
* +---------------+
* ```
*
* For GMV, which thinks that the interface is in landscape (`UIDeviceOrientation` changed to landscape)
* the face is in `(0, 0)`. However, for our app `(0, 0)` is in the top left corner of the device --
* -- that's where the face indicator gets positioned.
*
* That's when we have to rotate and translate the face indicator. Here we have to rotate it by -90 degrees.
*
* ```
* +---------------+
* |^^ |xx // something is still wrong
* || |
* | |
* +---------------+
* ```
*
* Not only must we rotate the indicator, we also have to translate it. Here by (-videoWidth, 0).
*
* ```
* +---------------+
* |** | // detected eyes glow inside the face indicator
* || |
* | |
* +---------------+
* ```
*
* Fixing this issue is the purpose of this whole class.
*
*/
typedef NS_ENUM(NSInteger, RNTranslationEnum) {
RNTranslateYNegativeWidth,
RNTranslateXNegativeHeight,
RNTranslateXYNegative,
RNTranslateYXNegative
};
@interface RNFaceDetectorPointTransformCalculator()
@property (assign, nonatomic) AVCaptureVideoOrientation fromOrientation;
@property (assign, nonatomic) AVCaptureVideoOrientation toOrientation;
@property (assign, nonatomic) CGFloat videoWidth;
@property (assign, nonatomic) CGFloat videoHeight;
@end
@implementation RNFaceDetectorPointTransformCalculator
- (instancetype)initToTransformFromOrientation:(AVCaptureVideoOrientation)fromOrientation toOrientation:(AVCaptureVideoOrientation)toOrientation forVideoWidth:(CGFloat)videoWidth andVideoHeight:(CGFloat)videoHeight
{
self = [super init];
if (self) {
_fromOrientation = fromOrientation;
_toOrientation = toOrientation;
_videoWidth = videoWidth;
_videoHeight = videoHeight;
}
return self;
}
- (CGFloat)rotation
{
if (_fromOrientation == _toOrientation) {
return 0;
}
AVCaptureVideoOrientation firstOrientation = MIN(_fromOrientation, _toOrientation);
AVCaptureVideoOrientation secondOrientation = MAX(_fromOrientation, _toOrientation);
CGFloat angle = [[[self class] getRotationDictionary][@(firstOrientation)][@(secondOrientation)] doubleValue];
/*
* It turns out that if you need to rotate the indicator by -90 degrees to get it from
* landscape left (Device orientation) to portrait (Interface Orientation),
* to get the indicator from portrait (D) to landscape left (I), you need to rotate it by 90 degrees.
* Same analogy `r(1, 2) == x <==> r(2, 1) == -x` is true for every other transformation.
*/
if (_fromOrientation > _toOrientation) {
angle = -angle;
}
return angle;
}
- (CGPoint)translation
{
if (_fromOrientation == _toOrientation) {
return CGPointZero;
}
AVCaptureVideoOrientation firstOrientation = MIN(_fromOrientation, _toOrientation);
AVCaptureVideoOrientation secondOrientation = MAX(_fromOrientation, _toOrientation);
RNTranslationEnum enumValue = [[[self class] getTranslationDictionary][@(firstOrientation)][@(secondOrientation)] intValue];
CGPoint translation = [self translationForEnum:enumValue];
/*
* Here the analogy is a little bit more complicated than when calculating rotation.
* It turns out that if you need to translate the _rotated_ indicator
* from landscape left (D) to portrait (I) by `(-videoWidth, 0)` (see top class comment),
* to translate the rotated indicator from portrait (D) to landscape left (I) you need to translate it
* by `(0, -videoWidth)`.
*
* ```
* +-------+
* +--------------------+ |^^ | // ^^ == happy face
* |^^ | | |
* | | | |
* | | | || // | or - == bottom of the interface
* | | | |
* | | |x | // xx == initial face indicator
* +--------------------+ |x |
* - +-------+
* oo // oo == rotated face indicator
* ```
*
* As we can see, the indicator has to be translated by `(0, -videoWidth)` to match with the happy face.
*
* It turns out, that `(0, -videoWidth) == translation(device: 1, interface: 4)` can be calculated by
* rotating `translation(device: 4, interface: 1) == (-videoWidth, 0)` by `rotation(4, 1) == -90deg`.
*
* One might think that the same analogy `t(1, 2) == r(2, 1)[t(2, 1)]` works always,
* but here this assumption would be wrong. The analogy works only when device and interface rotations
* differ by 90 or -90 degrees.
*
* Otherwise (when transforming from/to portrait/upside or landscape left/right)
* `translation(1, 2) == translation(2, 1)`.
*/
if (_fromOrientation > _toOrientation) {
CGFloat translationRotationAngle = [self rotation];
if (cModEqualFloats(translationRotationAngle + M_PI, M_PI, M_PI_2)) {
CGAffineTransform transform = CGAffineTransformIdentity;
transform = CGAffineTransformRotate(transform, translationRotationAngle);
translation = CGPointApplyAffineTransform(translation, transform);
}
}
return translation;
}
- (CGAffineTransform)transform
{
CGAffineTransform transform = CGAffineTransformIdentity;
CGFloat rotation = [self rotation];
transform = CGAffineTransformRotate(transform, rotation);
CGPoint translation = [self translation];
transform = CGAffineTransformTranslate(transform, translation.x, translation.y);
return transform;
}
# pragma mark - Enum conversion
- (CGPoint)translationForEnum:(RNTranslationEnum)enumValue
{
switch (enumValue) {
case RNTranslateXNegativeHeight:
return CGPointMake(-_videoHeight, 0);
case RNTranslateYNegativeWidth:
return CGPointMake(0, -_videoWidth);
case RNTranslateXYNegative:
return CGPointMake(-_videoWidth, -_videoHeight);
case RNTranslateYXNegative:
return CGPointMake(-_videoHeight, -_videoWidth);
}
}
# pragma mark - Lookup tables
static NSDictionary<NSNumber *, NSDictionary<NSNumber *, NSNumber *> *> *rotationDictionary = nil;
static NSDictionary<NSNumber *, NSDictionary<NSNumber *, NSNumber *> *> *translationDictionary = nil;
+ (NSDictionary<NSNumber *, NSDictionary<NSNumber *, NSNumber *> *> *) getRotationDictionary
{
if (rotationDictionary == nil) {
[self initRotationDictionary];
}
return rotationDictionary;
}
+ (NSDictionary<NSNumber *, NSDictionary<NSNumber *, NSNumber *> *> *) getTranslationDictionary
{
if (translationDictionary == nil) {
[self initTranslationDictionary];
}
return translationDictionary;
}
# pragma mark - Initialize dictionaries
// If you wonder why this dictionary is half-empty, see comment inside `- (CGFloat)rotation`. It may help you.
+ (void)initRotationDictionary
{
rotationDictionary = @{
@(AVCaptureVideoOrientationPortrait): @{
@(AVCaptureVideoOrientationLandscapeLeft) : @(M_PI_2),
@(AVCaptureVideoOrientationLandscapeRight) : @(-M_PI_2),
@(AVCaptureVideoOrientationPortraitUpsideDown) : @(M_PI),
},
@(AVCaptureVideoOrientationPortraitUpsideDown): @{
@(AVCaptureVideoOrientationLandscapeLeft) : @(-M_PI_2),
@(AVCaptureVideoOrientationLandscapeRight) : @(M_PI_2)
},
@(AVCaptureVideoOrientationLandscapeRight): @{
@(AVCaptureVideoOrientationLandscapeLeft) : @(M_PI)
}
};
}
// If you wonder why this dictionary is half-empty, see comment inside `- (CGPoint)translation`. It may help you.
+ (void)initTranslationDictionary
{
translationDictionary = @{
@(AVCaptureVideoOrientationPortrait): @{
@(AVCaptureVideoOrientationLandscapeLeft) : @(RNTranslateYNegativeWidth),
@(AVCaptureVideoOrientationLandscapeRight) : @(RNTranslateXNegativeHeight),
@(AVCaptureVideoOrientationPortraitUpsideDown) : @(RNTranslateYXNegative)
},
@(AVCaptureVideoOrientationPortraitUpsideDown): @{
@(AVCaptureVideoOrientationLandscapeLeft) : @(RNTranslateXNegativeHeight),
@(AVCaptureVideoOrientationLandscapeRight) : @(RNTranslateYNegativeWidth)
},
@(AVCaptureVideoOrientationLandscapeRight): @{
@(AVCaptureVideoOrientationLandscapeLeft) : @(RNTranslateXYNegative)
}
};
}
@end

View File

@ -0,0 +1,35 @@
//
// RNFaceDetectorUtils.h
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import <UIKit/UIKit.h>
#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <GoogleMVDataOutput/GoogleMVDataOutput.h>
typedef NS_ENUM(NSInteger, RNFaceDetectionMode) {
RNFaceDetectionFastMode = GMVDetectorFaceFastMode,
RNFaceDetectionAccurateMode = GMVDetectorFaceAccurateMode
};
typedef NS_ENUM(NSInteger, RNFaceDetectionLandmarks) {
RNFaceDetectAllLandmarks = GMVDetectorFaceLandmarkAll,
RNFaceDetectNoLandmarks = GMVDetectorFaceLandmarkNone
};
typedef NS_ENUM(NSInteger, RNFaceDetectionClassifications) {
RNFaceRunAllClassifications = GMVDetectorFaceClassificationAll,
RNFaceRunNoClassifications = GMVDetectorFaceClassificationNone
};
@interface RNFaceDetectorUtils : NSObject
+ (NSDictionary *)constantsToExport;
+ (CGAffineTransform)transformFromDeviceOutput:(GMVDataOutput *)dataOutput toInterfaceVideoOrientation:(AVCaptureVideoOrientation)interfaceVideoOrientation;
@end

View File

@ -0,0 +1,77 @@
//
// RNFaceDetectorUtils.m
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import "RNCameraUtils.h"
#import "RNFaceDetectorUtils.h"
#import "RNFaceDetectorPointTransformCalculator.h"
NSString *const RNGMVDataOutputWidthKey = @"Width";
NSString *const RNGMVDataOutputHeightKey = @"Height";
@implementation RNFaceDetectorUtils
+ (NSDictionary *)constantsToExport
{
return @{
@"Mode" : @{
@"fast" : @(RNFaceDetectionFastMode),
@"accurate" : @(RNFaceDetectionAccurateMode)
},
@"Landmarks" : @{
@"all" : @(RNFaceDetectAllLandmarks),
@"none" : @(RNFaceDetectNoLandmarks)
},
@"Classifications" : @{
@"all" : @(RNFaceRunAllClassifications),
@"none" : @(RNFaceRunNoClassifications)
}
};
}
# pragma mark - GMVDataOutput transformations
+ (CGAffineTransform)transformFromDeviceVideoOrientation:(AVCaptureVideoOrientation)deviceVideoOrientation toInterfaceVideoOrientation:(AVCaptureVideoOrientation)interfaceVideoOrientation videoWidth:(NSNumber *)width videoHeight:(NSNumber *)height
{
RNFaceDetectorPointTransformCalculator *calculator = [[RNFaceDetectorPointTransformCalculator alloc] initToTransformFromOrientation:deviceVideoOrientation toOrientation:interfaceVideoOrientation forVideoWidth:[width floatValue] andVideoHeight:[height floatValue]];
return [calculator transform];
}
// Normally we would use `dataOutput.xScale`, `.yScale` and `.offset`.
// Unfortunately, it turns out that using these attributes results in different results
// on iPhone {6, 7} and iPhone 5S. On newer iPhones the transform works properly,
// whereas on iPhone 5S the scale is too big (~0.7, while it should be ~0.4) and the offset
// moves the face points away. This workaround (using screen + orientation + video resolution
// to calculate proper scale) has been proven to work on all three devices.
+ (CGAffineTransform)transformFromDeviceOutput:(GMVDataOutput *)dataOutput withInterfaceOrientation:(AVCaptureVideoOrientation)interfaceVideoOrientation
{
UIScreen *mainScreen = [UIScreen mainScreen];
BOOL interfaceIsLandscape = interfaceVideoOrientation == AVCaptureVideoOrientationLandscapeLeft || interfaceVideoOrientation == AVCaptureVideoOrientationLandscapeRight;
CGFloat interfaceWidth = interfaceIsLandscape ? mainScreen.bounds.size.height : mainScreen.bounds.size.width;
CGFloat interfaceHeight = interfaceIsLandscape ? mainScreen.bounds.size.width : mainScreen.bounds.size.height;
CGFloat xScale = interfaceWidth / [(NSNumber *)dataOutput.videoSettings[RNGMVDataOutputHeightKey] floatValue];
CGFloat yScale = interfaceHeight / [(NSNumber *)dataOutput.videoSettings[RNGMVDataOutputWidthKey] floatValue];
CGAffineTransform dataOutputTransform = CGAffineTransformIdentity;
dataOutputTransform = CGAffineTransformScale(dataOutputTransform, xScale, yScale);
return dataOutputTransform;
}
+ (CGAffineTransform)transformFromDeviceOutput:(GMVDataOutput *)dataOutput toInterfaceVideoOrientation:(AVCaptureVideoOrientation)interfaceVideoOrientation
{
UIDeviceOrientation currentDeviceOrientation = [[UIDevice currentDevice] orientation];
AVCaptureVideoOrientation deviceVideoOrientation = [RNCameraUtils videoOrientationForDeviceOrientation:currentDeviceOrientation];
NSNumber *videoWidth = dataOutput.videoSettings[RNGMVDataOutputWidthKey];
NSNumber *videoHeight = dataOutput.videoSettings[RNGMVDataOutputHeightKey];
CGAffineTransform interfaceTransform = [self transformFromDeviceVideoOrientation:deviceVideoOrientation toInterfaceVideoOrientation:interfaceVideoOrientation videoWidth:videoWidth videoHeight:videoHeight];
CGAffineTransform dataOutputTransform = [self transformFromDeviceOutput:dataOutput withInterfaceOrientation:interfaceVideoOrientation];
return CGAffineTransformConcat(interfaceTransform, dataOutputTransform);
}
@end

View File

@ -0,0 +1,17 @@
//
// RNFaceEncoder.h
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import <UIKit/UIKit.h>
#import <GoogleMobileVision/GoogleMobileVision.h>
@interface RNFaceEncoder : NSObject
- (instancetype)initWithTransform:(CGAffineTransform)transform;
- (NSDictionary *)encode:(GMVFaceFeature *)face;
@end

View File

@ -0,0 +1,119 @@
//
// RNFaceEncoder.m
// RCTCamera
//
// Created by Joao Guilherme Daros Fidelis on 21/01/18.
//
#import "RNFaceEncoder.h"
#define cDefaultFloatComparisonEpsilon 0.0001
#define cModEqualFloatsWithEpsilon(dividend, divisor, modulo, epsilon) \
fabs( fmod(dividend, divisor) - modulo ) < epsilon
#define cModEqualFloats(dividend, divisor, modulo) \
cModEqualFloatsWithEpsilon(dividend, divisor, modulo, cDefaultFloatComparisonEpsilon)
@interface RNFaceEncoder()
@property (assign, nonatomic) BOOL swapWidthAndHeight;
@property (assign, nonatomic) CGAffineTransform transform;
@property (assign, nonatomic) CGFloat rollAngleDegreesFromTransform;
@end
@implementation RNFaceEncoder
- (instancetype)init
{
return [self initWithTransform:CGAffineTransformIdentity];
}
- (instancetype)initWithTransform:(CGAffineTransform)transform
{
self = [super init];
if (self) {
_transform = transform;
_rollAngleDegreesFromTransform = [self radianAngleToDegrees:[self rollAngleFromTransform:_transform]];
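// Bounds width and height must be swapped when the transform rotates by an odd multiple of 90 degrees.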
_swapWidthAndHeight = cModEqualFloats(_rollAngleDegreesFromTransform + 360, 180, 90);
}
return self;
}
- (NSDictionary *)encode:(GMVFaceFeature *)face
{
CGRect bounds = CGRectApplyAffineTransform(face.bounds, _transform);
NSDictionary *initialDictionary = @{
@"bounds" : @{
@"size" : @{
@"width" : @(_swapWidthAndHeight ? bounds.size.height : bounds.size.width),
@"height" : @(_swapWidthAndHeight ? bounds.size.width : bounds.size.height)
},
@"origin" : @{
@"x" : @(bounds.origin.x),
@"y" : @(bounds.origin.y)
}
}
};
NSMutableDictionary *encodedFace = [[NSMutableDictionary alloc] initWithDictionary:initialDictionary];
[self putAFloat:face.smilingProbability forKey:@"smilingProbability" toDictionary:encodedFace ifValueIsValid:face.hasSmilingProbability];
[self putAnInteger:face.trackingID forKey:@"faceID" toDictionary:encodedFace ifValueIsValid:face.hasTrackingID];
[self putAPoint:face.leftEarPosition forKey:@"leftEarPosition" toDictionary:encodedFace ifValueIsValid:face.hasLeftEarPosition];
[self putAPoint:face.rightEarPosition forKey:@"rightEarPosition" toDictionary:encodedFace ifValueIsValid:face.hasRightEarPosition];
[self putAPoint:face.leftEyePosition forKey:@"leftEyePosition" toDictionary:encodedFace ifValueIsValid:face.hasLeftEyePosition];
[self putAFloat:face.leftEyeOpenProbability forKey:@"leftEyeOpenProbability" toDictionary:encodedFace ifValueIsValid:face.hasLeftEyeOpenProbability];
[self putAPoint:face.rightEyePosition forKey:@"rightEyePosition" toDictionary:encodedFace ifValueIsValid:face.hasRightEyePosition];
[self putAFloat:face.rightEyeOpenProbability forKey:@"rightEyeOpenProbability" toDictionary:encodedFace ifValueIsValid:face.hasRightEyeOpenProbability];
[self putAPoint:face.leftCheekPosition forKey:@"leftCheekPosition" toDictionary:encodedFace ifValueIsValid:face.hasLeftCheekPosition];
[self putAPoint:face.rightCheekPosition forKey:@"rightCheekPosition" toDictionary:encodedFace ifValueIsValid:face.hasRightCheekPosition];
[self putAPoint:face.leftMouthPosition forKey:@"leftMouthPosition" toDictionary:encodedFace ifValueIsValid:face.hasLeftMouthPosition];
[self putAPoint:face.mouthPosition forKey:@"mouthPosition" toDictionary:encodedFace ifValueIsValid:face.hasMouthPosition];
[self putAPoint:face.rightMouthPosition forKey:@"rightMouthPosition" toDictionary:encodedFace ifValueIsValid:face.hasRightMouthPosition];
[self putAPoint:face.bottomMouthPosition forKey:@"bottomMouthPosition" toDictionary:encodedFace ifValueIsValid:face.hasBottomMouthPosition];
[self putAPoint:face.noseBasePosition forKey:@"noseBasePosition" toDictionary:encodedFace ifValueIsValid:face.hasNoseBasePosition];
[self putAFloat:face.headEulerAngleY forKey:@"yawAngle" toDictionary:encodedFace ifValueIsValid:face.hasHeadEulerAngleY];
[self putAFloat:-(face.headEulerAngleZ - _rollAngleDegreesFromTransform) forKey:@"rollAngle" toDictionary:encodedFace ifValueIsValid:face.hasHeadEulerAngleZ];
return encodedFace;
}
- (void)putAPoint:(CGPoint)point forKey:(NSString *)key toDictionary:(NSMutableDictionary *)dictionary ifValueIsValid:(BOOL)pointIsValid
{
if (pointIsValid) {
CGPoint transformedPoint = CGPointApplyAffineTransform(point, _transform);
[dictionary setObject:@{ @"x" : @(transformedPoint.x), @"y" : @(transformedPoint.y) } forKey:key];
}
}
- (void)putAFloat:(CGFloat)value forKey:(NSString *)key toDictionary:(NSMutableDictionary *)dictionary ifValueIsValid:(BOOL)floatIsValid
{
if (floatIsValid) {
[dictionary setObject:@(value) forKey:key];
}
}
- (void)putAnInteger:(NSUInteger)value forKey:(NSString *)key toDictionary:(NSMutableDictionary *)dictionary ifValueIsValid:(BOOL)integerIsValid
{
if (integerIsValid) {
[dictionary setObject:@(value) forKey:key];
}
}
- (CGFloat)rollAngleFromTransform:(CGAffineTransform)transform
{
return atan2f(transform.b, transform.a);
}
- (CGFloat)radianAngleToDegrees:(CGFloat)angle
{
return angle * (180 / M_PI);
}
@end

View File

@ -181,9 +181,11 @@
- (void)changePreviewOrientation:(NSInteger)orientation
{
dispatch_async(self.manager.sessionQueue, ^{
dispatch_async(dispatch_get_main_queue(), ^{
if (self.manager.previewLayer.connection.isVideoOrientationSupported) {
self.manager.previewLayer.connection.videoOrientation = orientation;
}
});
});
}

View File

@ -16,7 +16,8 @@ typedef NS_ENUM(NSInteger, RCTCameraCaptureSessionPreset) {
RCTCameraCaptureSessionPresetPhoto = 3,
RCTCameraCaptureSessionPreset480p = 4,
RCTCameraCaptureSessionPreset720p = 5,
RCTCameraCaptureSessionPreset1080p = 6
RCTCameraCaptureSessionPreset1080p = 6,
RCTCameraCaptureSessionPreset4k = 7
};
typedef NS_ENUM(NSInteger, RCTCameraCaptureMode) {
@ -71,6 +72,7 @@ typedef NS_ENUM(NSInteger, RCTCameraTorchMode) {
@property (nonatomic, assign) NSInteger videoTarget;
@property (nonatomic, assign) NSInteger orientation;
@property (nonatomic, assign) BOOL mirrorImage;
@property (nonatomic, assign) BOOL cropToPreview;
@property (nonatomic, strong) NSArray* barCodeTypes;
@property (nonatomic, strong) RCTPromiseResolveBlock videoResolve;
@property (nonatomic, strong) RCTPromiseRejectBlock videoReject;
@ -88,6 +90,7 @@ typedef NS_ENUM(NSInteger, RCTCameraTorchMode) {
- (void)stopSession;
- (void)focusAtThePoint:(CGPoint) atPoint;
- (void)zoom:(CGFloat)velocity reactTag:(NSNumber *)reactTag;
- (void)setZoom;
@end

View File

@ -106,7 +106,9 @@ RCT_EXPORT_MODULE();
@"720p": @(RCTCameraCaptureSessionPreset720p),
@"AVCaptureSessionPreset1280x720": @(RCTCameraCaptureSessionPreset720p),
@"1080p": @(RCTCameraCaptureSessionPreset1080p),
@"AVCaptureSessionPreset1920x1080": @(RCTCameraCaptureSessionPreset1080p)
@"AVCaptureSessionPreset1920x1080": @(RCTCameraCaptureSessionPreset1080p),
@"4k": @(RCTCameraCaptureSessionPreset4k),
@"AVCaptureSessionPreset3840x2160": @(RCTCameraCaptureSessionPreset4k)
},
@"CaptureTarget": @{
@"memory": @(RCTCameraCaptureTargetMemory),
@ -156,6 +158,9 @@ RCT_CUSTOM_VIEW_PROPERTY(captureQuality, NSInteger, RCTCamera) {
case RCTCameraCaptureSessionPresetPhoto:
qualityString = AVCaptureSessionPresetPhoto;
break;
case RCTCameraCaptureSessionPreset4k:
qualityString = AVCaptureSessionPreset3840x2160;
break;
case RCTCameraCaptureSessionPreset1080p:
qualityString = AVCaptureSessionPreset1920x1080;
break;
@ -293,6 +298,10 @@ RCT_CUSTOM_VIEW_PROPERTY(mirrorImage, BOOL, RCTCamera) {
self.mirrorImage = [RCTConvert BOOL:json];
}
RCT_CUSTOM_VIEW_PROPERTY(cropToPreview, BOOL, RCTCamera) {
self.cropToPreview = [RCTConvert BOOL:json];
}
RCT_CUSTOM_VIEW_PROPERTY(barCodeTypes, NSArray, RCTCamera) {
self.barCodeTypes = [RCTConvert NSArray:json];
}
@ -378,6 +387,28 @@ RCT_EXPORT_METHOD(capture:(NSDictionary *)options
}
}
RCT_EXPORT_METHOD(stopPreview) {
#if TARGET_IPHONE_SIMULATOR
return;
#endif
dispatch_async(self.sessionQueue, ^{
if ([self.session isRunning]) {
[self.session stopRunning];
}
});
}
RCT_EXPORT_METHOD(startPreview) {
#if TARGET_IPHONE_SIMULATOR
return;
#endif
dispatch_async(self.sessionQueue, ^{
if (![self.session isRunning]) {
[self.session startRunning];
}
});
}
RCT_EXPORT_METHOD(stopCapture) {
if (self.movieFileOutput.recording) {
[self.movieFileOutput stopRecording];
@ -421,6 +452,20 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
resolve(@(device.hasFlash));
}
RCT_EXPORT_METHOD(setZoom:(CGFloat)zoomFactor) {
if (isnan(zoomFactor)) {
return;
}
NSError *error = nil;
AVCaptureDevice *device = [[self videoCaptureDeviceInput] device];
if ([device lockForConfiguration:&error]) {
device.videoZoomFactor = zoomFactor;
[device unlockForConfiguration];
} else {
NSLog(@"error: %@", error);
}
}
- (void)startSession {
#if TARGET_IPHONE_SIMULATOR
return;
@ -589,7 +634,7 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
UIGraphicsEndImageContext();
NSData *imageData = UIImageJPEGRepresentation(image, 1.0);
[self saveImage:imageData target:target metadata:nil resolve:resolve reject:reject];
[self saveImage:imageData imageSize:size target:target metadata:nil resolve:resolve reject:reject];
#else
[[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:orientation];
@ -606,6 +651,9 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
// create cgimage
CGImageRef cgImage = CGImageSourceCreateImageAtIndex(source, 0, NULL);
// setup viewport size before using
CGSize viewportSize;
// Rotate it
CGImageRef rotatedCGImage;
if ([options objectForKey:@"rotation"]) {
@ -635,6 +683,21 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
rotatedCGImage = cgImage;
}
// Crop it
if (self.cropToPreview) {
if (UIInterfaceOrientationIsPortrait([[UIApplication sharedApplication] statusBarOrientation]))
{
viewportSize = CGSizeMake(self.previewLayer.frame.size.height, self.previewLayer.frame.size.width);
} else {
viewportSize = CGSizeMake(self.previewLayer.frame.size.width, self.previewLayer.frame.size.height);
}
CGRect captureRect = CGRectMake(0, 0, CGImageGetWidth(rotatedCGImage), CGImageGetHeight(rotatedCGImage));
CGRect croppedSize = AVMakeRectWithAspectRatioInsideRect(viewportSize, captureRect);
rotatedCGImage = CGImageCreateWithImageInRect(rotatedCGImage, croppedSize);
}
// Strip the TIFF metadata dictionary
[imageMetadata removeObjectForKey:(NSString *)kCGImagePropertyTIFFDictionary];
@ -649,9 +712,18 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
CGImageDestinationAddImage(destination, rotatedCGImage, (CFDictionaryRef) imageMetadata);
// And write
CGImageDestinationFinalize(destination);
CGSize frameSize;
if (UIInterfaceOrientationIsPortrait([[UIApplication sharedApplication] statusBarOrientation]))
{
frameSize = CGSizeMake(CGImageGetHeight(rotatedCGImage),
CGImageGetWidth(rotatedCGImage));
} else {
frameSize = CGSizeMake(CGImageGetWidth(rotatedCGImage),
CGImageGetHeight(rotatedCGImage));
}
CFRelease(destination);
[self saveImage:rotatedImageData target:target metadata:imageMetadata resolve:resolve reject:reject];
[self saveImage:rotatedImageData imageSize:frameSize target:target metadata:imageMetadata resolve:resolve reject:reject];
CGImageRelease(rotatedCGImage);
}
@ -664,7 +736,7 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
}
- (void)saveImage:(NSData*)imageData target:(NSInteger)target metadata:(NSDictionary *)metadata resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject {
- (void)saveImage:(NSData*)imageData imageSize:(CGSize)imageSize target:(NSInteger)target metadata:(NSDictionary *)metadata resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject {
NSString *responseString;
if (target == RCTCameraCaptureTargetMemory) {
@ -703,7 +775,7 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
}];
return;
}
resolve(@{@"path":responseString});
resolve(@{@"path":responseString, @"width":[NSNumber numberWithFloat:imageSize.width], @"height":[NSNumber numberWithFloat:imageSize.height]});
}
- (CGImageRef)newCGImageRotatedByAngle:(CGImageRef)imgRef angle:(CGFloat)angle
@ -846,7 +918,14 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
if (error) {
self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
return;
} else if (assetURL == nil) {
//Writing to the camera roll can fail without an error being returned;
//in that case assetURL comes back nil. This typically happens when the
//disk is (almost) full.
self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(@"Not enough storage"));
return;
}
[videoInfo setObject:[assetURL absoluteString] forKey:@"path"];
self.videoResolve(videoInfo);
}];
@ -1045,11 +1124,15 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
{
#if !(TARGET_IPHONE_SIMULATOR)
if (quality) {
[self.session beginConfiguration];
if ([self.session canSetSessionPreset:quality]) {
self.session.sessionPreset = quality;
}
[self.session commitConfiguration];
dispatch_async([self sessionQueue], ^{
[self.session beginConfiguration];
if ([self.session canSetSessionPreset:quality]) {
self.session.sessionPreset = quality;
} else {
self.session.sessionPreset = AVCaptureSessionPresetHigh;
}
[self.session commitConfiguration];
});
}
#endif
}
55
ios/RN/RNCamera.h Normal file
View File
@ -0,0 +1,55 @@
#import <AVFoundation/AVFoundation.h>
#import <React/RCTBridge.h>
#import <React/RCTBridgeModule.h>
#import <UIKit/UIKit.h>
#if __has_include("RNFaceDetectorManager.h")
#import "RNFaceDetectorManager.h"
#else
#import "RNFaceDetectorManagerStub.h"
#endif
@class RNCamera;
@interface RNCamera : UIView <AVCaptureMetadataOutputObjectsDelegate, AVCaptureFileOutputRecordingDelegate, RNFaceDetectorDelegate>
@property(nonatomic, strong) dispatch_queue_t sessionQueue;
@property(nonatomic, strong) AVCaptureSession *session;
@property(nonatomic, strong) AVCaptureDeviceInput *videoCaptureDeviceInput;
@property(nonatomic, strong) AVCaptureStillImageOutput *stillImageOutput;
@property(nonatomic, strong) AVCaptureMovieFileOutput *movieFileOutput;
@property(nonatomic, strong) AVCaptureMetadataOutput *metadataOutput;
@property(nonatomic, strong) id runtimeErrorHandlingObserver;
@property(nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
@property(nonatomic, strong) NSArray *barCodeTypes;
@property(nonatomic, assign) NSInteger presetCamera;
@property (assign, nonatomic) NSInteger flashMode;
@property (assign, nonatomic) CGFloat zoom;
@property (assign, nonatomic) NSInteger autoFocus;
@property (assign, nonatomic) float focusDepth;
@property (assign, nonatomic) NSInteger whiteBalance;
@property (nonatomic, assign, getter=isReadingBarCodes) BOOL barCodeReading;
- (id)initWithBridge:(RCTBridge *)bridge;
- (void)updateType;
- (void)updateFlashMode;
- (void)updateFocusMode;
- (void)updateFocusDepth;
- (void)updateZoom;
- (void)updateWhiteBalance;
- (void)updateFaceDetecting:(id)isDetectingFaces;
- (void)updateFaceDetectionMode:(id)requestedMode;
- (void)updateFaceDetectionLandmarks:(id)requestedLandmarks;
- (void)updateFaceDetectionClassifications:(id)requestedClassifications;
- (void)takePicture:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject;
- (void)record:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject;
- (void)stopRecording;
- (void)setupOrDisableBarcodeScanner;
- (void)onReady:(NSDictionary *)event;
- (void)onMountingError:(NSDictionary *)event;
- (void)onCodeRead:(NSDictionary *)event;
- (void)onFacesDetected:(NSDictionary *)event;
@end
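The props and update hooks declared above are driven from a native view manager; a minimal sketch of that wiring (RNCameraManager, and the way the zoom prop is mapped, are assumptions for illustration; this diff does not show the manager):

#import <React/RCTViewManager.h>
#import <React/RCTConvert.h>
#import "RNCamera.h"

@interface RNCameraManager : RCTViewManager
@end

@implementation RNCameraManager

RCT_EXPORT_MODULE(RNCameraManager);

// The manager vends the camera view itself.
- (UIView *)view
{
  return [[RNCamera alloc] initWithBridge:self.bridge];
}

// Map the JS `zoom` prop onto the view, then apply it to the device.
RCT_CUSTOM_VIEW_PROPERTY(zoom, CGFloat, RNCamera)
{
  view.zoom = [RCTConvert CGFloat:json];
  [view updateZoom];
}

@end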
752
ios/RN/RNCamera.m Normal file
View File
@ -0,0 +1,752 @@
#import "RNCamera.h"
#import "RNCameraUtils.h"
#import "RNImageUtils.h"
#import "RNFileSystem.h"
#import <React/RCTEventDispatcher.h>
#import <React/RCTLog.h>
#import <React/RCTUtils.h>
#import <React/UIView+React.h>
@interface RNCamera ()
@property (nonatomic, weak) RCTBridge *bridge;
@property (nonatomic, assign, getter=isSessionPaused) BOOL paused;
@property (nonatomic, strong) RCTPromiseResolveBlock videoRecordedResolve;
@property (nonatomic, strong) RCTPromiseRejectBlock videoRecordedReject;
@property (nonatomic, strong) id faceDetectorManager;
@property (nonatomic, copy) RCTDirectEventBlock onCameraReady;
@property (nonatomic, copy) RCTDirectEventBlock onMountError;
@property (nonatomic, copy) RCTDirectEventBlock onBarCodeRead;
@property (nonatomic, copy) RCTDirectEventBlock onFacesDetected;
@end
@implementation RNCamera
static NSDictionary *defaultFaceDetectorOptions = nil;
- (id)initWithBridge:(RCTBridge *)bridge
{
if ((self = [super init])) {
self.bridge = bridge;
self.session = [AVCaptureSession new];
self.sessionQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
self.faceDetectorManager = [self createFaceDetectorManager];
#if !(TARGET_IPHONE_SIMULATOR)
self.previewLayer =
[AVCaptureVideoPreviewLayer layerWithSession:self.session];
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.previewLayer.needsDisplayOnBoundsChange = YES;
#endif
self.paused = NO;
[self changePreviewOrientation:[UIApplication sharedApplication].statusBarOrientation];
[self initializeCaptureSessionInput];
[self startSession];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(orientationChanged:)
name:UIDeviceOrientationDidChangeNotification
object:nil];
self.autoFocus = -1;
// [[NSNotificationCenter defaultCenter] addObserver:self
// selector:@selector(bridgeDidForeground:)
// name:EX_UNVERSIONED(@"EXKernelBridgeDidForegroundNotification")
// object:self.bridge];
//
// [[NSNotificationCenter defaultCenter] addObserver:self
// selector:@selector(bridgeDidBackground:)
// name:EX_UNVERSIONED(@"EXKernelBridgeDidBackgroundNotification")
// object:self.bridge];
}
return self;
}
- (void)onReady:(NSDictionary *)event
{
if (_onCameraReady) {
_onCameraReady(nil);
}
}
- (void)onMountingError:(NSDictionary *)event
{
if (_onMountError) {
_onMountError(event);
}
}
- (void)onCodeRead:(NSDictionary *)event
{
if (_onBarCodeRead) {
_onBarCodeRead(event);
}
}
- (void)layoutSubviews
{
[super layoutSubviews];
self.previewLayer.frame = self.bounds;
[self setBackgroundColor:[UIColor blackColor]];
[self.layer insertSublayer:self.previewLayer atIndex:0];
}
- (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex
{
[self insertSubview:view atIndex:atIndex + 1];
[super insertReactSubview:view atIndex:atIndex];
return;
}
- (void)removeReactSubview:(UIView *)subview
{
[subview removeFromSuperview];
[super removeReactSubview:subview];
return;
}
- (void)removeFromSuperview
{
[self stopSession];
[super removeFromSuperview];
[[NSNotificationCenter defaultCenter] removeObserver:self name:UIDeviceOrientationDidChangeNotification object:nil];
}
-(void)updateType
{
dispatch_async(self.sessionQueue, ^{
[self initializeCaptureSessionInput];
if (!self.session.isRunning) {
[self startSession];
}
});
}
- (void)updateFlashMode
{
AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
NSError *error = nil;
if (self.flashMode == RNCameraFlashModeTorch) {
if (![device hasTorch])
return;
if (![device lockForConfiguration:&error]) {
if (error) {
RCTLogError(@"%s: %@", __func__, error);
}
return;
}
if (device.hasTorch && [device isTorchModeSupported:AVCaptureTorchModeOn])
{
NSError *error = nil;
if ([device lockForConfiguration:&error]) {
[device setFlashMode:AVCaptureFlashModeOff];
[device setTorchMode:AVCaptureTorchModeOn];
[device unlockForConfiguration];
} else {
if (error) {
RCTLogError(@"%s: %@", __func__, error);
}
}
}
} else {
if (![device hasFlash])
return;
if (![device lockForConfiguration:&error]) {
if (error) {
RCTLogError(@"%s: %@", __func__, error);
}
return;
}
if (device.hasFlash && [device isFlashModeSupported:self.flashMode])
{
NSError *error = nil;
if ([device lockForConfiguration:&error]) {
if ([device isTorchModeSupported:AVCaptureTorchModeOff]) {
[device setTorchMode:AVCaptureTorchModeOff];
}
[device setFlashMode:self.flashMode];
[device unlockForConfiguration];
} else {
if (error) {
RCTLogError(@"%s: %@", __func__, error);
}
}
}
}
[device unlockForConfiguration];
}
- (void)updateFocusMode
{
AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
NSError *error = nil;
if (![device lockForConfiguration:&error]) {
if (error) {
RCTLogError(@"%s: %@", __func__, error);
}
return;
}
if ([device isFocusModeSupported:self.autoFocus]) {
if ([device lockForConfiguration:&error]) {
[device setFocusMode:self.autoFocus];
} else {
if (error) {
RCTLogError(@"%s: %@", __func__, error);
}
}
}
[device unlockForConfiguration];
}
- (void)updateFocusDepth
{
AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
NSError *error = nil;
if (self.autoFocus < 0 || device.focusMode != RNCameraAutoFocusOff) {
return;
}
if (![device respondsToSelector:@selector(isLockingFocusWithCustomLensPositionSupported)] || ![device isLockingFocusWithCustomLensPositionSupported]) {
RCTLogWarn(@"%s: Setting focusDepth isn't supported for this camera device", __func__);
return;
}
if (![device lockForConfiguration:&error]) {
if (error) {
RCTLogError(@"%s: %@", __func__, error);
}
return;
}
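// A weak reference keeps the completion handler from extending the device's
// lifetime; the handler only needs to balance lockForConfiguration.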
__weak __typeof__(device) weakDevice = device;
[device setFocusModeLockedWithLensPosition:self.focusDepth completionHandler:^(CMTime syncTime) {
[weakDevice unlockForConfiguration];
}];
}
- (void)updateZoom {
AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
NSError *error = nil;
if (![device lockForConfiguration:&error]) {
if (error) {
RCTLogError(@"%s: %@", __func__, error);
}
return;
}
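// Map the JS-side zoom value in [0, 1] linearly onto the device's supported
// range [1.0, videoMaxZoomFactor].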
device.videoZoomFactor = (device.activeFormat.videoMaxZoomFactor - 1.0) * self.zoom + 1.0;
[device unlockForConfiguration];
}
- (void)updateWhiteBalance
{
AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
NSError *error = nil;
if (![device lockForConfiguration:&error]) {
if (error) {
RCTLogError(@"%s: %@", __func__, error);
}
return;
}
if (self.whiteBalance == RNCameraWhiteBalanceAuto) {
[device setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
[device unlockForConfiguration];
} else {
AVCaptureWhiteBalanceTemperatureAndTintValues temperatureAndTint = {
.temperature = [RNCameraUtils temperatureForWhiteBalance:self.whiteBalance],
.tint = 0,
};
AVCaptureWhiteBalanceGains rgbGains = [device deviceWhiteBalanceGainsForTemperatureAndTintValues:temperatureAndTint];
__weak __typeof__(device) weakDevice = device;
if ([device lockForConfiguration:&error]) {
[device setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:rgbGains completionHandler:^(CMTime syncTime) {
[weakDevice unlockForConfiguration];
}];
} else {
if (error) {
RCTLogError(@"%s: %@", __func__, error);
}
}
}
[device unlockForConfiguration];
}
- (void)updateFaceDetecting:(id)faceDetecting
{
[_faceDetectorManager setIsEnabled:faceDetecting];
}
- (void)updateFaceDetectionMode:(id)requestedMode
{
[_faceDetectorManager setMode:requestedMode];
}
- (void)updateFaceDetectionLandmarks:(id)requestedLandmarks
{
[_faceDetectorManager setLandmarksDetected:requestedLandmarks];
}
- (void)updateFaceDetectionClassifications:(id)requestedClassifications
{
[_faceDetectorManager setClassificationsDetected:requestedClassifications];
}
- (void)takePicture:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject
{
AVCaptureConnection *connection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
[connection setVideoOrientation:[RNCameraUtils videoOrientationForDeviceOrientation:[[UIDevice currentDevice] orientation]]];
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
if (imageSampleBuffer && !error) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *takenImage = [UIImage imageWithData:imageData];
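// metadataOutputRectOfInterestForRect: yields a rect in normalized (0-1)
// coordinates; scaling it by the CGImage's pixel size below produces the
// crop that matches what the preview layer actually displayed.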
CGRect frame = [_previewLayer metadataOutputRectOfInterestForRect:self.frame];
CGImageRef takenCGImage = takenImage.CGImage;
size_t width = CGImageGetWidth(takenCGImage);
size_t height = CGImageGetHeight(takenCGImage);
CGRect cropRect = CGRectMake(frame.origin.x * width, frame.origin.y * height, frame.size.width * width, frame.size.height * height);
takenImage = [RNImageUtils cropImage:takenImage toRect:cropRect];
if ([options[@"mirrorImage"] boolValue]) {
takenImage = [RNImageUtils mirrorImage:takenImage];
}
NSMutableDictionary *response = [[NSMutableDictionary alloc] init];
float quality = [options[@"quality"] floatValue];
NSData *takenImageData = UIImageJPEGRepresentation(takenImage, quality);
NSString *path = [RNFileSystem generatePathInDirectory:[[RNFileSystem cacheDirectoryPath] stringByAppendingPathComponent:@"Camera"] withExtension:@".jpg"];
response[@"uri"] = [RNImageUtils writeImage:takenImageData toPath:path];
response[@"width"] = @(takenImage.size.width);
response[@"height"] = @(takenImage.size.height);
if ([options[@"base64"] boolValue]) {
response[@"base64"] = [takenImageData base64EncodedStringWithOptions:0];
}
if ([options[@"exif"] boolValue]) {
int imageRotation;
switch (takenImage.imageOrientation) {
case UIImageOrientationLeft:
case UIImageOrientationRightMirrored:
imageRotation = 90;
break;
case UIImageOrientationRight:
case UIImageOrientationLeftMirrored:
imageRotation = -90;
break;
case UIImageOrientationDown:
case UIImageOrientationDownMirrored:
imageRotation = 180;
break;
case UIImageOrientationUpMirrored:
default:
imageRotation = 0;
break;
}
[RNImageUtils updatePhotoMetadata:imageSampleBuffer withAdditionalData:@{ @"Orientation": @(imageRotation) } inResponse:response]; // TODO
}
resolve(response);
} else {
reject(@"E_IMAGE_CAPTURE_FAILED", @"Image could not be captured", error);
}
}];
}
- (void)record:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject
{
if (_movieFileOutput == nil) {
// At the time of writing, AVCaptureMovieFileOutput and AVCaptureVideoDataOutput
// (which GMVDataOutput extends) cannot coexist on the same AVCaptureSession
// (see: https://stackoverflow.com/a/4986032/1123156). We stop face detection
// here and restart it when AVCaptureMovieFileOutput finishes recording.
[_faceDetectorManager stopFaceDetection];
[self setupMovieFileCapture];
}
if (self.movieFileOutput != nil && !self.movieFileOutput.isRecording && _videoRecordedResolve == nil && _videoRecordedReject == nil) {
if (options[@"maxDuration"]) {
Float64 maxDuration = [options[@"maxDuration"] floatValue];
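// CMTimeMakeWithSeconds takes (seconds, timescale); the 30 below is the
// timescale in ticks per second, not a frame-rate cap.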
self.movieFileOutput.maxRecordedDuration = CMTimeMakeWithSeconds(maxDuration, 30);
}
if (options[@"maxFileSize"]) {
self.movieFileOutput.maxRecordedFileSize = [options[@"maxFileSize"] integerValue];
}
if (options[@"quality"]) {
[self updateSessionPreset:[RNCameraUtils captureSessionPresetForVideoResolution:(RNCameraVideoResolution)[options[@"quality"] integerValue]]];
}
[self updateSessionAudioIsMuted:!!options[@"mute"]];
AVCaptureConnection *connection = [self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
[connection setVideoOrientation:[RNCameraUtils videoOrientationForInterfaceOrientation:[[UIApplication sharedApplication] statusBarOrientation]]];
dispatch_async(self.sessionQueue, ^{
NSString *path = [RNFileSystem generatePathInDirectory:[[RNFileSystem cacheDirectoryPath] stringByAppendingString:@"Camera"] withExtension:@".mov"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:path];
[self.movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
self.videoRecordedResolve = resolve;
self.videoRecordedReject = reject;
});
}
}
- (void)stopRecording
{
[self.movieFileOutput stopRecording];
}
- (void)startSession
{
#if TARGET_IPHONE_SIMULATOR
return;
#endif
// NSDictionary *cameraPermissions = [EXCameraPermissionRequester permissions];
// if (![cameraPermissions[@"status"] isEqualToString:@"granted"]) {
// [self onMountingError:@{@"message": @"Camera permissions not granted - component could not be rendered."}];
// return;
// }
dispatch_async(self.sessionQueue, ^{
if (self.presetCamera == AVCaptureDevicePositionUnspecified) {
return;
}
AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
if ([self.session canAddOutput:stillImageOutput]) {
stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
[self.session addOutput:stillImageOutput];
[stillImageOutput setHighResolutionStillImageOutputEnabled:YES];
self.stillImageOutput = stillImageOutput;
}
[_faceDetectorManager maybeStartFaceDetectionOnSession:_session withPreviewLayer:_previewLayer];
[self setupOrDisableBarcodeScanner];
__weak RNCamera *weakSelf = self;
[self setRuntimeErrorHandlingObserver:
[NSNotificationCenter.defaultCenter addObserverForName:AVCaptureSessionRuntimeErrorNotification object:self.session queue:nil usingBlock:^(NSNotification *note) {
RNCamera *strongSelf = weakSelf;
dispatch_async(strongSelf.sessionQueue, ^{
// Manually restarting the session since it must
// have been stopped due to an error.
[strongSelf.session startRunning];
[strongSelf onReady:nil];
});
}]];
[self.session startRunning];
[self onReady:nil];
});
}
- (void)stopSession
{
#if TARGET_IPHONE_SIMULATOR
return;
#endif
dispatch_async(self.sessionQueue, ^{
[_faceDetectorManager stopFaceDetection];
[self.previewLayer removeFromSuperlayer];
[self.session commitConfiguration];
[self.session stopRunning];
for (AVCaptureInput *input in self.session.inputs) {
[self.session removeInput:input];
}
for (AVCaptureOutput *output in self.session.outputs) {
[self.session removeOutput:output];
}
});
}
- (void)initializeCaptureSessionInput
{
if (self.videoCaptureDeviceInput.device.position == self.presetCamera) {
return;
}
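// -[UIApplication statusBarOrientation] is main-thread-only UIKit state, so
// read it there (synchronously if needed) before hopping to the session queue.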
__block UIInterfaceOrientation interfaceOrientation;
void (^statusBlock)() = ^() {
interfaceOrientation = [[UIApplication sharedApplication] statusBarOrientation];
};
if ([NSThread isMainThread]) {
statusBlock();
} else {
dispatch_sync(dispatch_get_main_queue(), statusBlock);
}
AVCaptureVideoOrientation orientation = [RNCameraUtils videoOrientationForInterfaceOrientation:interfaceOrientation];
dispatch_async(self.sessionQueue, ^{
[self.session beginConfiguration];
NSError *error = nil;
AVCaptureDevice *captureDevice = [RNCameraUtils deviceWithMediaType:AVMediaTypeVideo preferringPosition:self.presetCamera];
AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if (error || captureDeviceInput == nil) {
RCTLog(@"%s: %@", __func__, error);
return;
}
[self.session removeInput:self.videoCaptureDeviceInput];
if ([self.session canAddInput:captureDeviceInput]) {
[self.session addInput:captureDeviceInput];
self.videoCaptureDeviceInput = captureDeviceInput;
[self updateFlashMode];
[self updateZoom];
[self updateFocusMode];
[self updateFocusDepth];
[self updateWhiteBalance];
[self.previewLayer.connection setVideoOrientation:orientation];
[self _updateMetadataObjectsToRecognize];
}
[self.session commitConfiguration];
});
}
#pragma mark - internal
- (void)updateSessionPreset:(NSString *)preset
{
#if !(TARGET_IPHONE_SIMULATOR)
if (preset) {
dispatch_async(self.sessionQueue, ^{
[self.session beginConfiguration];
if ([self.session canSetSessionPreset:preset]) {
self.session.sessionPreset = preset;
}
[self.session commitConfiguration];
});
}
#endif
}
- (void)updateSessionAudioIsMuted:(BOOL)isMuted
{
dispatch_async(self.sessionQueue, ^{
[self.session beginConfiguration];
for (AVCaptureDeviceInput* input in [self.session inputs]) {
if ([input.device hasMediaType:AVMediaTypeAudio]) {
if (isMuted) {
[self.session removeInput:input];
}
[self.session commitConfiguration];
return;
}
}
if (!isMuted) {
NSError *error = nil;
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
if (error || audioDeviceInput == nil) {
RCTLogWarn(@"%s: %@", __func__, error);
return;
}
if ([self.session canAddInput:audioDeviceInput]) {
[self.session addInput:audioDeviceInput];
}
}
[self.session commitConfiguration];
});
}
- (void)bridgeDidForeground:(NSNotification *)notification
{
if (![self.session isRunning] && [self isSessionPaused]) {
self.paused = NO;
dispatch_async( self.sessionQueue, ^{
[self.session startRunning];
});
}
}
- (void)bridgeDidBackground:(NSNotification *)notification
{
if ([self.session isRunning] && ![self isSessionPaused]) {
self.paused = YES;
dispatch_async( self.sessionQueue, ^{
[self.session stopRunning];
});
}
}
- (void)orientationChanged:(NSNotification *)notification
{
UIInterfaceOrientation orientation = [[UIApplication sharedApplication] statusBarOrientation];
[self changePreviewOrientation:orientation];
}
- (void)changePreviewOrientation:(UIInterfaceOrientation)orientation
{
__weak typeof(self) weakSelf = self;
AVCaptureVideoOrientation videoOrientation = [RNCameraUtils videoOrientationForInterfaceOrientation:orientation];
dispatch_async(dispatch_get_main_queue(), ^{
__strong typeof(self) strongSelf = weakSelf;
if (strongSelf && strongSelf.previewLayer.connection.isVideoOrientationSupported) {
[strongSelf.previewLayer.connection setVideoOrientation:videoOrientation];
}
});
}
# pragma mark - AVCaptureMetadataOutput
- (void)setupOrDisableBarcodeScanner
{
[self _setupOrDisableMetadataOutput];
[self _updateMetadataObjectsToRecognize];
}
- (void)_setupOrDisableMetadataOutput
{
if ([self isReadingBarCodes] && (_metadataOutput == nil || ![self.session.outputs containsObject:_metadataOutput])) {
AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
if ([self.session canAddOutput:metadataOutput]) {
[metadataOutput setMetadataObjectsDelegate:self queue:self.sessionQueue];
[self.session addOutput:metadataOutput];
self.metadataOutput = metadataOutput;
}
} else if (_metadataOutput != nil && ![self isReadingBarCodes]) {
[self.session removeOutput:_metadataOutput];
_metadataOutput = nil;
}
}
- (void)_updateMetadataObjectsToRecognize
{
if (_metadataOutput == nil) {
return;
}
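// setMetadataObjectTypes: throws for types the current configuration cannot
// produce, so intersect the requested types with availableMetadataObjectTypes.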
NSArray<AVMetadataObjectType> *availableRequestedObjectTypes = [[NSArray alloc] init];
NSArray<AVMetadataObjectType> *requestedObjectTypes = [NSArray arrayWithArray:self.barCodeTypes];
NSArray<AVMetadataObjectType> *availableObjectTypes = _metadataOutput.availableMetadataObjectTypes;
for(AVMetadataObjectType objectType in requestedObjectTypes) {
if ([availableObjectTypes containsObject:objectType]) {
availableRequestedObjectTypes = [availableRequestedObjectTypes arrayByAddingObject:objectType];
}
}
[_metadataOutput setMetadataObjectTypes:availableRequestedObjectTypes];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects
fromConnection:(AVCaptureConnection *)connection
{
for(AVMetadataObject *metadata in metadataObjects) {
if([metadata isKindOfClass:[AVMetadataMachineReadableCodeObject class]]) {
AVMetadataMachineReadableCodeObject *codeMetadata = (AVMetadataMachineReadableCodeObject *) metadata;
for (id barcodeType in self.barCodeTypes) {
if ([metadata.type isEqualToString:barcodeType]) {
NSDictionary *event = @{
@"type" : codeMetadata.type,
@"data" : codeMetadata.stringValue
};
[self onCodeRead:event];
}
}
}
}
}
# pragma mark - AVCaptureMovieFileOutput
- (void)setupMovieFileCapture
{
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([self.session canAddOutput:movieFileOutput]) {
[self.session addOutput:movieFileOutput];
self.movieFileOutput = movieFileOutput;
}
}
- (void)cleanupMovieFileCapture
{
if ([_session.outputs containsObject:_movieFileOutput]) {
[_session removeOutput:_movieFileOutput];
_movieFileOutput = nil;
}
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
BOOL success = YES;
if ([error code] != noErr) {
NSNumber *value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value) {
success = [value boolValue];
}
}
if (success && self.videoRecordedResolve != nil) {
self.videoRecordedResolve(@{ @"uri": outputFileURL.absoluteString });
} else if (self.videoRecordedReject != nil) {
self.videoRecordedReject(@"E_RECORDING_FAILED", @"An error occurred while recording a video.", error);
}
self.videoRecordedResolve = nil;
self.videoRecordedReject = nil;
[self cleanupMovieFileCapture];
// If face detection has been running prior to recording to file
// we reenable it here (see comment in -record).
[_faceDetectorManager maybeStartFaceDetectionOnSession:_session withPreviewLayer:_previewLayer];
if (self.session.sessionPreset != AVCaptureSessionPresetHigh) {
[self updateSessionPreset:AVCaptureSessionPresetHigh];
}
}
# pragma mark - Face detector
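// The face detector is a soft dependency: resolving the class at runtime via
// NSClassFromString keeps RNCamera usable when the optional face-detector
// module is not compiled in, falling back to the stub implementation.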
- (id)createFaceDetectorManager
{
Class faceDetectorManagerClass = NSClassFromString(@"RNFaceDetectorManager");
Class faceDetectorManagerStubClass = NSClassFromString(@"RNFaceDetectorManagerStub");
if (faceDetectorManagerClass) {
return [[faceDetectorManagerClass alloc] initWithSessionQueue:_sessionQueue delegate:self];
} else if (faceDetectorManagerStubClass) {
return [[faceDetectorManagerStubClass alloc] init];
}
return nil;
}
- (void)onFacesDetected:(NSArray<NSDictionary *> *)faces
{
if (_onFacesDetected) {
_onFacesDetected(@{
@"type": @"face",
@"faces": faces
});
}
}
@end
Some files were not shown because too many files have changed in this diff.