rename expo to simple RN, add js files, and export them inside the same camera package

This commit is contained in:
Sibelius Seraphini 2018-01-01 19:16:02 -02:00
parent ffc74e5e6e
commit a0817132d6
19 changed files with 661 additions and 91 deletions

246
THIRD-PARTY-LICENSES Normal file
View File

@ -0,0 +1,246 @@
===============================================================================
expo/expo
https://github.com/expo/expo
-------------------------------------------------------------------------------
BSD License
For Exponent software
Copyright (c) 2015-present, 650 Industries, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the names 650 Industries, Exponent, nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
===============================================================================
google/cameraview
https://github.com/google/cameraview
-------------------------------------------------------------------------------
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -10,11 +10,19 @@ import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.uimanager.ViewManager;
import com.facebook.react.bridge.JavaScriptModule;
import org.reactnative.camera.CameraModule;
import org.reactnative.camera.CameraViewManager;
import org.reactnative.facedetector.FaceDetectorModule;
public class RCTCameraPackage implements ReactPackage {
@Override
public List<NativeModule> createNativeModules(ReactApplicationContext reactApplicationContext) {
return Collections.<NativeModule>singletonList(new RCTCameraModule(reactApplicationContext));
return Arrays.<NativeModule>asList(
new RCTCameraModule(reactApplicationContext),
new CameraModule(reactApplicationContext),
new FaceDetectorModule(reactApplicationContext)
);
}
// Deprecated in RN 0.47
@ -24,8 +32,10 @@ public class RCTCameraPackage implements ReactPackage {
@Override
public List<ViewManager> createViewManagers(ReactApplicationContext reactApplicationContext) {
//noinspection ArraysAsListWithZeroOrOneArgument
return Collections.<ViewManager>singletonList(new RCTCameraViewManager());
return Arrays.<ViewManager>asList(
new RCTCameraViewManager(),
new CameraViewManager()
);
}
}

View File

@ -2,7 +2,7 @@ package org.reactnative.camera;
import android.content.Context;
import org.reactnative.facedetector.ExpoFaceDetector;
import org.reactnative.facedetector.RNFaceDetector;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
@ -70,7 +70,7 @@ public class CameraModule extends ReactContextBaseJavaModule {
@Override
public String getName() {
return "ExponentCameraModule";
return "RNCameraModule";
}
@Nullable
@ -94,8 +94,8 @@ public class CameraModule extends ReactContextBaseJavaModule {
private Map<String, Object> getFaceDetectionModeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("fast", ExpoFaceDetector.FAST_MODE);
put("accurate", ExpoFaceDetector.ACCURATE_MODE);
put("fast", RNFaceDetector.FAST_MODE);
put("accurate", RNFaceDetector.ACCURATE_MODE);
}
});
}
@ -103,8 +103,8 @@ public class CameraModule extends ReactContextBaseJavaModule {
private Map<String, Object> getFaceDetectionClassificationsConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("all", ExpoFaceDetector.ALL_CLASSIFICATIONS);
put("none", ExpoFaceDetector.NO_CLASSIFICATIONS);
put("all", RNFaceDetector.ALL_CLASSIFICATIONS);
put("none", RNFaceDetector.NO_CLASSIFICATIONS);
}
});
}
@ -112,8 +112,8 @@ public class CameraModule extends ReactContextBaseJavaModule {
private Map<String, Object> getFaceDetectionLandmarksConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("all", ExpoFaceDetector.ALL_LANDMARKS);
put("none", ExpoFaceDetector.NO_LANDMARKS);
put("all", RNFaceDetector.ALL_LANDMARKS);
put("none", RNFaceDetector.NO_LANDMARKS);
}
});
}

View File

@ -21,7 +21,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
public class CameraViewManager extends ViewGroupManager<ExpoCameraView> {
public class CameraViewManager extends ViewGroupManager<RNCameraView> {
public enum Events {
EVENT_CAMERA_READY("onCameraReady"),
EVENT_ON_MOUNT_ERROR("onMountError"),
@ -41,10 +41,10 @@ public class CameraViewManager extends ViewGroupManager<ExpoCameraView> {
}
}
private static final String REACT_CLASS = "ExponentCamera";
private static final String REACT_CLASS = "RNCamera";
private static CameraViewManager instance;
private ExpoCameraView mCameraView;
private RNCameraView mCameraView;
public CameraViewManager() {
super();
@ -59,8 +59,8 @@ public class CameraViewManager extends ViewGroupManager<ExpoCameraView> {
}
@Override
protected ExpoCameraView createViewInstance(ThemedReactContext themedReactContext) {
mCameraView = new ExpoCameraView(themedReactContext);
protected RNCameraView createViewInstance(ThemedReactContext themedReactContext) {
mCameraView = new RNCameraView(themedReactContext);
return mCameraView;
}
@ -75,42 +75,42 @@ public class CameraViewManager extends ViewGroupManager<ExpoCameraView> {
}
@ReactProp(name = "type")
public void setType(ExpoCameraView view, int type) {
public void setType(RNCameraView view, int type) {
view.setFacing(type);
}
@ReactProp(name = "ratio")
public void setRatio(ExpoCameraView view, String ratio) {
public void setRatio(RNCameraView view, String ratio) {
view.setAspectRatio(AspectRatio.parse(ratio));
}
@ReactProp(name = "flashMode")
public void setFlashMode(ExpoCameraView view, int torchMode) {
public void setFlashMode(RNCameraView view, int torchMode) {
view.setFlash(torchMode);
}
@ReactProp(name = "autoFocus")
public void setAutoFocus(ExpoCameraView view, boolean autoFocus) {
public void setAutoFocus(RNCameraView view, boolean autoFocus) {
view.setAutoFocus(autoFocus);
}
@ReactProp(name = "focusDepth")
public void setFocusDepth(ExpoCameraView view, float depth) {
public void setFocusDepth(RNCameraView view, float depth) {
view.setFocusDepth(depth);
}
@ReactProp(name = "zoom")
public void setZoom(ExpoCameraView view, float zoom) {
public void setZoom(RNCameraView view, float zoom) {
view.setZoom(zoom);
}
@ReactProp(name = "whiteBalance")
public void setWhiteBalance(ExpoCameraView view, int whiteBalance) {
public void setWhiteBalance(RNCameraView view, int whiteBalance) {
view.setWhiteBalance(whiteBalance);
}
@ReactProp(name = "barCodeTypes")
public void setBarCodeTypes(ExpoCameraView view, ReadableArray barCodeTypes) {
public void setBarCodeTypes(RNCameraView view, ReadableArray barCodeTypes) {
if (barCodeTypes == null) {
return;
}
@ -122,27 +122,27 @@ public class CameraViewManager extends ViewGroupManager<ExpoCameraView> {
}
@ReactProp(name = "barCodeScannerEnabled")
public void setBarCodeScanning(ExpoCameraView view, boolean barCodeScannerEnabled) {
public void setBarCodeScanning(RNCameraView view, boolean barCodeScannerEnabled) {
view.setShouldScanBarCodes(barCodeScannerEnabled);
}
@ReactProp(name = "faceDetectorEnabled")
public void setFaceDetecting(ExpoCameraView view, boolean faceDetectorEnabled) {
public void setFaceDetecting(RNCameraView view, boolean faceDetectorEnabled) {
view.setShouldDetectFaces(faceDetectorEnabled);
}
@ReactProp(name = "faceDetectionMode")
public void setFaceDetectionMode(ExpoCameraView view, int mode) {
public void setFaceDetectionMode(RNCameraView view, int mode) {
view.setFaceDetectionMode(mode);
}
@ReactProp(name = "faceDetectionLandmarks")
public void setFaceDetectionLandmarks(ExpoCameraView view, int landmarks) {
public void setFaceDetectionLandmarks(RNCameraView view, int landmarks) {
view.setFaceDetectionLandmarks(landmarks);
}
@ReactProp(name = "faceDetectionClassifications")
public void setFaceDetectionClassifications(ExpoCameraView view, int classifications) {
public void setFaceDetectionClassifications(RNCameraView view, int classifications) {
view.setFaceDetectionClassifications(classifications);
}
@ -154,7 +154,7 @@ public class CameraViewManager extends ViewGroupManager<ExpoCameraView> {
promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
}
} else {
Bitmap image = ExpoCameraViewHelper.generateSimulatorPhoto(mCameraView.getWidth(), mCameraView.getHeight());
Bitmap image = RNCameraViewHelper.generateSimulatorPhoto(mCameraView.getWidth(), mCameraView.getHeight());
ByteBuffer byteBuffer = ByteBuffer.allocate(image.getRowBytes() * image.getHeight());
image.copyPixelsToBuffer(byteBuffer);
new ResolveTakenPictureAsyncTask(byteBuffer.array(), promise, options).execute();
@ -163,7 +163,7 @@ public class CameraViewManager extends ViewGroupManager<ExpoCameraView> {
public void record(final ReadableMap options, final Promise promise) {
// TODO fix this
// Exponent.getInstance().getPermissions(new Exponent.PermissionsListener() {
// Exponent.getInstance().getPermissions(new Exponent.PermissionsListener() {
// @Override
// public void permissionsGranted() {
// if (mCameraView.isCameraOpened()) {

View File

@ -16,7 +16,7 @@ import org.reactnative.camera.tasks.FaceDetectorAsyncTask;
import org.reactnative.camera.tasks.FaceDetectorAsyncTaskDelegate;
import org.reactnative.camera.tasks.ResolveTakenPictureAsyncTask;
import org.reactnative.camera.utils.ImageDimensions;
import org.reactnative.facedetector.ExpoFaceDetector;
import org.reactnative.facedetector.RNFaceDetector;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.LifecycleEventListener;
import com.facebook.react.bridge.Promise;
@ -40,7 +40,7 @@ import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
public class ExpoCameraView extends CameraView implements LifecycleEventListener, BarCodeScannerAsyncTaskDelegate, FaceDetectorAsyncTaskDelegate {
public class RNCameraView extends CameraView implements LifecycleEventListener, BarCodeScannerAsyncTaskDelegate, FaceDetectorAsyncTaskDelegate {
private Queue<Promise> mPictureTakenPromises = new ConcurrentLinkedQueue<>();
private Map<Promise, ReadableMap> mPictureTakenOptions = new ConcurrentHashMap<>();
private Promise mVideoRecordedPromise;
@ -52,29 +52,29 @@ public class ExpoCameraView extends CameraView implements LifecycleEventListener
// Scanning-related properties
private final MultiFormatReader mMultiFormatReader = new MultiFormatReader();
private final ExpoFaceDetector mFaceDetector;
private final RNFaceDetector mFaceDetector;
private boolean mShouldDetectFaces = false;
private boolean mShouldScanBarCodes = false;
private int mFaceDetectorMode = ExpoFaceDetector.FAST_MODE;
private int mFaceDetectionLandmarks = ExpoFaceDetector.NO_LANDMARKS;
private int mFaceDetectionClassifications = ExpoFaceDetector.NO_CLASSIFICATIONS;
private int mFaceDetectorMode = RNFaceDetector.FAST_MODE;
private int mFaceDetectionLandmarks = RNFaceDetector.NO_LANDMARKS;
private int mFaceDetectionClassifications = RNFaceDetector.NO_CLASSIFICATIONS;
public ExpoCameraView(ThemedReactContext themedReactContext) {
public RNCameraView(ThemedReactContext themedReactContext) {
super(themedReactContext);
initBarcodeReader();
mFaceDetector = new ExpoFaceDetector(themedReactContext);
mFaceDetector = new RNFaceDetector(themedReactContext);
setupFaceDetector();
themedReactContext.addLifecycleEventListener(this);
addCallback(new Callback() {
@Override
public void onCameraOpened(CameraView cameraView) {
ExpoCameraViewHelper.emitCameraReadyEvent(cameraView);
RNCameraViewHelper.emitCameraReadyEvent(cameraView);
}
@Override
public void onMountError(CameraView cameraView) {
ExpoCameraViewHelper.emitMountErrorEvent(cameraView);
RNCameraViewHelper.emitMountErrorEvent(cameraView);
}
@Override
@ -101,7 +101,7 @@ public class ExpoCameraView extends CameraView implements LifecycleEventListener
@Override
public void onFramePreview(CameraView cameraView, byte[] data, int width, int height, int rotation) {
int correctRotation = ExpoCameraViewHelper.getCorrectCameraRotation(rotation, getFacing());
int correctRotation = RNCameraViewHelper.getCorrectCameraRotation(rotation, getFacing());
if (mShouldScanBarCodes && !barCodeScannerTaskLock && cameraView instanceof BarCodeScannerAsyncTaskDelegate) {
barCodeScannerTaskLock = true;
@ -165,7 +165,7 @@ public class ExpoCameraView extends CameraView implements LifecycleEventListener
CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
if (options.hasKey("quality")) {
profile = ExpoCameraViewHelper.getCamcorderProfile(options.getInt("quality"));
profile = RNCameraViewHelper.getCamcorderProfile(options.getInt("quality"));
}
boolean recordAudio = !options.hasKey("mute");
@ -213,7 +213,7 @@ public class ExpoCameraView extends CameraView implements LifecycleEventListener
return;
}
ExpoCameraViewHelper.emitBarCodeReadEvent(this, barCode);
RNCameraViewHelper.emitBarCodeReadEvent(this, barCode);
}
public void onBarCodeScanningTaskCompleted() {
@ -265,15 +265,15 @@ public class ExpoCameraView extends CameraView implements LifecycleEventListener
SparseArray<Face> facesDetected = facesReported == null ? new SparseArray<Face>() : facesReported;
ImageDimensions dimensions = new ImageDimensions(sourceWidth, sourceHeight, sourceRotation, getFacing());
ExpoCameraViewHelper.emitFacesDetectedEvent(this, facesDetected, dimensions);
RNCameraViewHelper.emitFacesDetectedEvent(this, facesDetected, dimensions);
}
public void onFaceDetectionError(ExpoFaceDetector faceDetector) {
public void onFaceDetectionError(RNFaceDetector faceDetector) {
if (!mShouldDetectFaces) {
return;
}
ExpoCameraViewHelper.emitFaceDetectionErrorEvent(this, faceDetector);
RNCameraViewHelper.emitFaceDetectionErrorEvent(this, faceDetector);
}
@Override
@ -290,7 +290,7 @@ public class ExpoCameraView extends CameraView implements LifecycleEventListener
} else {
WritableMap error = Arguments.createMap();
error.putString("message", "Camera permissions not granted - component could not be rendered.");
ExpoCameraViewHelper.emitMountErrorEvent(this);
RNCameraViewHelper.emitMountErrorEvent(this);
}
}

View File

@ -16,7 +16,7 @@ import org.reactnative.camera.events.CameraReadyEvent;
import org.reactnative.camera.events.FaceDetectionErrorEvent;
import org.reactnative.camera.events.FacesDetectedEvent;
import org.reactnative.camera.utils.ImageDimensions;
import org.reactnative.facedetector.ExpoFaceDetector;
import org.reactnative.facedetector.RNFaceDetector;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.WritableMap;
@ -32,7 +32,7 @@ import java.util.Calendar;
import java.util.Locale;
import java.util.UUID;
public class ExpoCameraViewHelper {
public class RNCameraViewHelper {
// Mount error event
public static void emitMountErrorEvent(ViewGroup view) {
@ -73,7 +73,7 @@ public class ExpoCameraViewHelper {
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
public static void emitFaceDetectionErrorEvent(ViewGroup view, ExpoFaceDetector faceDetector) {
public static void emitFaceDetectionErrorEvent(ViewGroup view, RNFaceDetector faceDetector) {
FaceDetectionErrorEvent event = FaceDetectionErrorEvent.obtain(view.getId(), faceDetector);
ReactContext reactContext = (ReactContext) view.getContext();
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);

View File

@ -3,7 +3,7 @@ package org.reactnative.camera.events;
import android.support.v4.util.Pools;
import org.reactnative.camera.CameraViewManager;
import org.reactnative.facedetector.ExpoFaceDetector;
import org.reactnative.facedetector.RNFaceDetector;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
@ -13,10 +13,10 @@ import java.util.Date;
public class FaceDetectionErrorEvent extends Event<FaceDetectionErrorEvent> {
private static final Pools.SynchronizedPool<FaceDetectionErrorEvent> EVENTS_POOL = new Pools.SynchronizedPool<>(3);
private ExpoFaceDetector mFaceDetector;
private RNFaceDetector mFaceDetector;
private FaceDetectionErrorEvent() {}
public static FaceDetectionErrorEvent obtain(int viewTag, ExpoFaceDetector faceDetector) {
public static FaceDetectionErrorEvent obtain(int viewTag, RNFaceDetector faceDetector) {
FaceDetectionErrorEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new FaceDetectionErrorEvent();
@ -25,7 +25,7 @@ public class FaceDetectionErrorEvent extends Event<FaceDetectionErrorEvent> {
return event;
}
private void init(int viewTag, ExpoFaceDetector faceDetector) {
private void init(int viewTag, RNFaceDetector faceDetector) {
super.init(viewTag);
mFaceDetector = faceDetector;
}

View File

@ -2,9 +2,9 @@ package org.reactnative.camera.tasks;
import android.util.SparseArray;
import org.reactnative.facedetector.ExpoFaceDetector;
import org.reactnative.facedetector.ExpoFrame;
import org.reactnative.facedetector.ExpoFrameFactory;
import org.reactnative.facedetector.RNFaceDetector;
import org.reactnative.facedetector.RNFrame;
import org.reactnative.facedetector.RNFrameFactory;
import com.google.android.gms.vision.face.Face;
public class FaceDetectorAsyncTask extends android.os.AsyncTask<Void, Void, SparseArray<Face>> {
@ -12,12 +12,12 @@ public class FaceDetectorAsyncTask extends android.os.AsyncTask<Void, Void, Spar
private int mWidth;
private int mHeight;
private int mRotation;
private ExpoFaceDetector mFaceDetector;
private RNFaceDetector mFaceDetector;
private FaceDetectorAsyncTaskDelegate mDelegate;
public FaceDetectorAsyncTask(
FaceDetectorAsyncTaskDelegate delegate,
ExpoFaceDetector faceDetector,
RNFaceDetector faceDetector,
byte[] imageData,
int width,
int height,
@ -37,7 +37,7 @@ public class FaceDetectorAsyncTask extends android.os.AsyncTask<Void, Void, Spar
return null;
}
ExpoFrame frame = ExpoFrameFactory.buildFrame(mImageData, mWidth, mHeight, mRotation);
RNFrame frame = RNFrameFactory.buildFrame(mImageData, mWidth, mHeight, mRotation);
return mFaceDetector.detect(frame);
}

View File

@ -2,11 +2,11 @@ package org.reactnative.camera.tasks;
import android.util.SparseArray;
import org.reactnative.facedetector.ExpoFaceDetector;
import org.reactnative.facedetector.RNFaceDetector;
import com.google.android.gms.vision.face.Face;
public interface FaceDetectorAsyncTaskDelegate {
void onFacesDetected(SparseArray<Face> face, int sourceWidth, int sourceHeight, int sourceRotation);
void onFaceDetectionError(ExpoFaceDetector faceDetector);
void onFaceDetectionError(RNFaceDetector faceDetector);
void onFaceDetectingTaskCompleted();
}

View File

@ -16,7 +16,7 @@ import java.util.Map;
import javax.annotation.Nullable;
public class FaceDetectorModule extends ReactContextBaseJavaModule {
private static final String TAG = "ExpoFaceDetector";
private static final String TAG = "RNFaceDetector";
// private ScopedContext mScopedContext;
private static ReactApplicationContext mScopedContext;
@ -43,8 +43,8 @@ private static ReactApplicationContext mScopedContext;
private Map<String, Object> getFaceDetectionModeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("fast", ExpoFaceDetector.FAST_MODE);
put("accurate", ExpoFaceDetector.ACCURATE_MODE);
put("fast", RNFaceDetector.FAST_MODE);
put("accurate", RNFaceDetector.ACCURATE_MODE);
}
});
}
@ -52,8 +52,8 @@ private static ReactApplicationContext mScopedContext;
private Map<String, Object> getFaceDetectionClassificationsConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("all", ExpoFaceDetector.ALL_CLASSIFICATIONS);
put("none", ExpoFaceDetector.NO_CLASSIFICATIONS);
put("all", RNFaceDetector.ALL_CLASSIFICATIONS);
put("none", RNFaceDetector.NO_CLASSIFICATIONS);
}
});
}
@ -61,8 +61,8 @@ private static ReactApplicationContext mScopedContext;
private Map<String, Object> getFaceDetectionLandmarksConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("all", ExpoFaceDetector.ALL_LANDMARKS);
put("none", ExpoFaceDetector.NO_LANDMARKS);
put("all", RNFaceDetector.ALL_LANDMARKS);
put("none", RNFaceDetector.NO_LANDMARKS);
}
});
}

View File

@ -8,7 +8,7 @@ import org.reactnative.camera.utils.ImageDimensions;
import com.google.android.gms.vision.face.Face;
import com.google.android.gms.vision.face.FaceDetector;
public class ExpoFaceDetector {
public class RNFaceDetector {
public static int ALL_CLASSIFICATIONS = FaceDetector.ALL_CLASSIFICATIONS;
public static int NO_CLASSIFICATIONS = FaceDetector.NO_CLASSIFICATIONS;
public static int ALL_LANDMARKS = FaceDetector.ALL_LANDMARKS;
@ -25,7 +25,7 @@ public class ExpoFaceDetector {
private float mMinFaceSize = 0.15f;
private int mMode = FAST_MODE;
public ExpoFaceDetector(Context context) {
public RNFaceDetector(Context context) {
mBuilder = new FaceDetector.Builder(context);
mBuilder.setMinFaceSize(mMinFaceSize);
mBuilder.setMode(mMode);
@ -43,7 +43,7 @@ public class ExpoFaceDetector {
return mFaceDetector.isOperational();
}
public SparseArray<Face> detect(ExpoFrame frame) {
public SparseArray<Face> detect(RNFrame frame) {
// If the frame has different dimensions, create another face detector.
// Otherwise we will get nasty "inconsistent image dimensions" error from detector
// and no face will be detected.

View File

@ -5,15 +5,15 @@ import com.google.android.gms.vision.Frame;
/**
* Wrapper around Frame allowing us to track Frame dimensions.
* Tracking dimensions is used in ExpoFaceDetector to provide painless FaceDetector recreation
* Tracking dimensions is used in RNFaceDetector to provide painless FaceDetector recreation
* when image dimensions change.
*/
public class ExpoFrame {
public class RNFrame {
private Frame mFrame;
private ImageDimensions mDimensions;
public ExpoFrame(Frame frame, ImageDimensions dimensions) {
public RNFrame(Frame frame, ImageDimensions dimensions) {
mFrame = frame;
mDimensions = dimensions;
}

View File

@ -8,8 +8,8 @@ import com.google.android.gms.vision.Frame;
import java.nio.ByteBuffer;
public class ExpoFrameFactory {
public static ExpoFrame buildFrame(byte[] bitmapData, int width, int height, int rotation) {
public class RNFrameFactory {
public static RNFrame buildFrame(byte[] bitmapData, int width, int height, int rotation) {
Frame.Builder builder = new Frame.Builder();
ByteBuffer byteBuffer = ByteBuffer.wrap(bitmapData);
@ -31,13 +31,13 @@ public class ExpoFrameFactory {
ImageDimensions dimensions = new ImageDimensions(width, height, rotation);
return new ExpoFrame(builder.build(), dimensions);
return new RNFrame(builder.build(), dimensions);
}
public static ExpoFrame buildFrame(Bitmap bitmap) {
public static RNFrame buildFrame(Bitmap bitmap) {
Frame.Builder builder = new Frame.Builder();
builder.setBitmap(bitmap);
ImageDimensions dimensions = new ImageDimensions(bitmap.getWidth(), bitmap.getHeight());
return new ExpoFrame(builder.build(), dimensions);
return new RNFrame(builder.build(), dimensions);
}
}

View File

@ -9,9 +9,9 @@ import android.os.AsyncTask;
import android.util.Log;
import android.util.SparseArray;
import org.reactnative.facedetector.ExpoFaceDetector;
import org.reactnative.facedetector.ExpoFrame;
import org.reactnative.facedetector.ExpoFrameFactory;
import org.reactnative.facedetector.RNFaceDetector;
import org.reactnative.facedetector.RNFrame;
import org.reactnative.facedetector.RNFrameFactory;
import org.reactnative.facedetector.FaceDetectorUtils;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Promise;
@ -41,7 +41,7 @@ public class FileFaceDetectionAsyncTask extends AsyncTask<Void, Void, SparseArra
private Context mContext;
private ReadableMap mOptions;
private int mOrientation = ExifInterface.ORIENTATION_UNDEFINED;
private ExpoFaceDetector mExpoFaceDetector;
private RNFaceDetector mRNFaceDetector;
public FileFaceDetectionAsyncTask(Context context, ReadableMap options, Promise promise) {
mUri = options.getString("uri");
@ -89,7 +89,7 @@ public class FileFaceDetectionAsyncTask extends AsyncTask<Void, Void, SparseArra
return null;
}
mExpoFaceDetector = detectorForOptions(mOptions, mContext);
mRNFaceDetector = detectorForOptions(mOptions, mContext);
Bitmap bitmap = BitmapFactory.decodeFile(mPath);
mWidth = bitmap.getWidth();
mHeight = bitmap.getHeight();
@ -101,8 +101,8 @@ public class FileFaceDetectionAsyncTask extends AsyncTask<Void, Void, SparseArra
Log.e(ERROR_TAG, "Reading orientation from file `" + mPath + "` failed.", e);
}
ExpoFrame frame = ExpoFrameFactory.buildFrame(bitmap);
return mExpoFaceDetector.detect(frame);
RNFrame frame = RNFrameFactory.buildFrame(bitmap);
return mRNFaceDetector.detect(frame);
}
@Override
@ -128,12 +128,12 @@ public class FileFaceDetectionAsyncTask extends AsyncTask<Void, Void, SparseArra
image.putString("uri", mUri);
result.putMap("image", image);
mExpoFaceDetector.release();
mRNFaceDetector.release();
mPromise.resolve(result);
}
private static ExpoFaceDetector detectorForOptions(ReadableMap options, Context context) {
ExpoFaceDetector detector = new ExpoFaceDetector(context);
private static RNFaceDetector detectorForOptions(ReadableMap options, Context context) {
RNFaceDetector detector = new RNFaceDetector(context);
detector.setTrackingEnabled(false);
if(options.hasKey(MODE_OPTION_KEY)) {

View File

@ -9,6 +9,7 @@
"logo": "https://opencollective.com/opencollective/logo.txt"
},
"dependencies": {
"lodash": "^4.17.4",
"prop-types": "^15.5.10"
},
"devDependencies": {

View File

@ -1,3 +1,4 @@
// @flow
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import {

60
src/FaceDetector.js Normal file
View File

@ -0,0 +1,60 @@
// @flow
import { NativeModules } from 'react-native';
const faceDetectionDisabledMessage = 'Face detection has not been included in this build.';
const FaceDetectorModule: Object = NativeModules.RNFaceDetector || {
stubbed: true,
Mode: {},
Landmarks: {},
Classifications: {},
detectFaces: () => new Promise((_, reject) => reject(faceDetectionDisabledMessage)),
};
// 2-D coordinate in image space, as reported by the native detector.
type Point = { x: number, y: number };

// A single detected face. Every field other than `bounds` is optional and is
// only populated when the corresponding landmark/classification mode was
// enabled in the detection options.
export type FaceFeature = {
  bounds: {
    size: {
      width: number,
      height: number,
    },
    origin: Point,
  },
  smilingProbability?: number,
  leftEarPosition?: Point,
  rightEarPosition?: Point,
  leftEyePosition?: Point,
  leftEyeOpenProbability?: number,
  rightEyePosition?: Point,
  rightEyeOpenProbability?: number,
  leftCheekPosition?: Point,
  rightCheekPosition?: Point,
  leftMouthPosition?: Point,
  mouthPosition?: Point,
  rightMouthPosition?: Point,
  bottomMouthPosition?: Point,
  noseBasePosition?: Point,
  yawAngle?: number,
  rollAngle?: number,
};

// Options accepted by detectFacesAsync. Valid values are the keys of the
// constant maps exported by the native module (e.g. Mode.fast / Mode.accurate).
type DetectionOptions = {
  mode?: $Keys<typeof FaceDetectorModule.Mode>,
  detectLandmarks?: $Keys<typeof FaceDetectorModule.Landmarks>,
  runClassifications?: $Keys<typeof FaceDetectorModule.Classifications>,
};
export default class FaceDetector {
static Constants = {
Mode: FaceDetectorModule.Mode,
Landmarks: FaceDetectorModule.Landmarks,
Classifications: FaceDetectorModule.Classifications,
};
static detectFacesAsync(uri: string, options: ?DetectionOptions): Promise<Array<FaceFeature>> {
return FaceDetectorModule.detectFaces({ ...options, uri });
}
}
export const Constants = FaceDetector.Constants;

250
src/RNCamera.js Normal file
View File

@ -0,0 +1,250 @@
// @flow
import React from 'react';
import PropTypes from 'prop-types';
import { mapValues } from 'lodash';
import { Platform, NativeModules, ViewPropTypes, requireNativeComponent } from 'react-native';
import type { FaceFeature } from './FaceDetector';
// Options for takePictureAsync. quality is a compression factor; the native
// side's accepted range is not visible here — presumably 0..1, TODO confirm.
type PictureOptions = {
  quality?: number,
};

// FaceFeature plus the tracking id assigned by the native detector, used to
// follow the same face across frames.
type TrackedFaceFeature = FaceFeature & {
  faceID?: number,
};

// Options for recordAsync. quality may be a numeric constant or the name of a
// key in Constants.VideoQuality (resolved in recordAsync).
type RecordingOptions = {
  maxDuration?: number,
  maxFileSize?: number,
  quality?: number | string,
};

// Shape of the synthetic event delivered by native event callbacks.
type EventCallbackArgumentsType = {
  nativeEvent: Object,
};

// Public props of the Camera component. Numeric/string unions accept either a
// raw native constant or a key that is mapped through ConversionTables.
type PropsType = ViewPropTypes & {
  zoom?: number,
  ratio?: string,
  focusDepth?: number,
  type?: number | string,
  onCameraReady?: Function,
  onBarCodeRead?: Function,
  faceDetectionMode?: number,
  flashMode?: number | string,
  barCodeTypes?: Array<string>,
  whiteBalance?: number | string,
  faceDetectionLandmarks?: number,
  autoFocus?: string | boolean | number,
  faceDetectionClassifications?: number,
  onFacesDetected?: ({ faces: Array<TrackedFaceFeature> }) => void,
};
// Native camera manager module (Android exposes RNCameraManager, iOS
// RNCameraModule). When neither is present — e.g. in tests or a build without
// the native code — fall back to a stub exposing the minimal constants that
// this file reads at module-evaluation time (defaultProps below).
const CameraManager: Object =
  NativeModules.RNCameraManager || NativeModules.RNCameraModule || {
    stubbed: true,
    Type: {
      back: 1,
    },
    AutoFocus: {
      on: 1
    },
    FlashMode: {
      off: 1,
    },
    WhiteBalance: {},
    BarCodeType: {},
    FaceDetection: {
      fast: 1,
      Mode: {},
      Landmarks: {
        none: 0,
      },
      Classifications: {
        none: 0,
      },
    },
  };

// Window (ms) within which an identical consecutive event of the same type is
// dropped — see _onObjectDetected.
const EventThrottleMs = 500;
export default class Camera extends React.Component<PropsType> {
static Constants = {
Type: CameraManager.Type,
FlashMode: CameraManager.FlashMode,
AutoFocus: CameraManager.AutoFocus,
WhiteBalance: CameraManager.WhiteBalance,
VideoQuality: CameraManager.VideoQuality,
BarCodeType: CameraManager.BarCodeType,
FaceDetection: CameraManager.FaceDetection,
};
// Values under keys from this object will be transformed to native options
static ConversionTables = {
type: CameraManager.Type,
flashMode: CameraManager.FlashMode,
autoFocus: CameraManager.AutoFocus,
whiteBalance: CameraManager.WhiteBalance,
faceDetectionMode: CameraManager.FaceDetection.Mode,
faceDetectionLandmarks: CameraManager.FaceDetection.Landmarks,
faceDetectionClassifications: CameraManager.FaceDetection.Classifications,
};
static propTypes = {
...ViewPropTypes,
zoom: PropTypes.number,
ratio: PropTypes.string,
focusDepth: PropTypes.number,
onMountError: PropTypes.func,
onCameraReady: PropTypes.func,
onBarCodeRead: PropTypes.func,
onFacesDetected: PropTypes.func,
faceDetectionMode: PropTypes.number,
faceDetectionLandmarks: PropTypes.number,
faceDetectionClassifications: PropTypes.number,
barCodeTypes: PropTypes.arrayOf(PropTypes.string),
type: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
flashMode: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
whiteBalance: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
autoFocus: PropTypes.oneOfType([PropTypes.string, PropTypes.number, PropTypes.bool]),
};
static defaultProps: Object = {
zoom: 0,
ratio: '4:3',
focusDepth: 0,
type: CameraManager.Type.back,
autoFocus: CameraManager.AutoFocus.on,
flashMode: CameraManager.FlashMode.off,
whiteBalance: CameraManager.WhiteBalance.auto,
faceDetectionMode: CameraManager.FaceDetection.fast,
barCodeTypes: Object.values(CameraManager.BarCodeType),
faceDetectionLandmarks: CameraManager.FaceDetection.Landmarks.none,
faceDetectionClassifications: CameraManager.FaceDetection.Classifications.none,
};
_lastEvents: { [string]: string };
_lastEventsTimes: { [string]: Date };
constructor(props: PropsType) {
super(props);
this._lastEvents = {};
this._lastEventsTimes = {};
}
async takePictureAsync(options?: PictureOptions) {
if (!options) {
options = {};
}
if (!options.quality) {
options.quality = 1;
}
return await CameraManager.takePicture(options);
}
async getSupportedRatiosAsync() {
if (Platform.OS === 'android') {
return await CameraManager.getSupportedRatios();
} else {
throw new Error('Ratio is not supported on iOS');
}
}
async recordAsync(options?: RecordingOptions) {
if (!options || typeof options !== 'object') {
options = {};
} else if (typeof options.quality === 'string') {
options.quality = Camera.Constants.VideoQuality[options.quality];
}
return await CameraManager.record(options);
}
stopRecording() {
CameraManager.stopRecording();
}
_onMountError = () => {
if (this.props.onMountError) {
this.props.onMountError();
}
};
_onCameraReady = () => {
if (this.props.onCameraReady) {
this.props.onCameraReady();
}
};
_onObjectDetected = (callback: ?Function) => ({ nativeEvent }: EventCallbackArgumentsType) => {
const { type } = nativeEvent;
if (
this._lastEvents[type] &&
this._lastEventsTimes[type] &&
JSON.stringify(nativeEvent) === this._lastEvents[type] &&
new Date() - this._lastEventsTimes[type] < EventThrottleMs
) {
return;
}
if (callback) {
callback(nativeEvent);
this._lastEventsTimes[type] = new Date();
this._lastEvents[type] = JSON.stringify(nativeEvent);
}
};
render() {
const nativeProps = this._convertNativeProps(this.props);
return (
<RNCamera
{...nativeProps}
onMountError={this._onMountError}
onCameraRead={this._onCameraReady}
onBarCodeRead={this._onObjectDetected(this.props.onBarCodeRead)}
onFacesDetected={this._onObjectDetected(this.props.onFacesDetected)}
/>
);
}
_convertNativeProps(props: PropsType) {
const newProps = mapValues(props, this._convertProp);
if (props.onBarCodeRead) {
newProps.barCodeScannerEnabled = true;
}
if (props.onFacesDetected) {
newProps.faceDetectorEnabled = true;
}
if (Platform.OS === 'ios') {
delete newProps.ratio;
}
return newProps;
}
_convertProp(value: *, key: string): * {
if (typeof value === 'string' && Camera.ConversionTables[key]) {
return Camera.ConversionTables[key][value];
}
return value;
}
}
export const Constants = Camera.Constants;

// Resolve the native view. nativeOnly lists props set internally by the
// Camera wrapper above so they are not part of the public prop interface.
const RNCamera = requireNativeComponent('RNCamera', Camera, {
  nativeOnly: {
    onCameraReady: true,
    onMountError: true,
    onBarCodeRead: true,
    // FIX: was `onFaceDetected` (singular), which matched nothing — the event
    // prop actually passed in render() is `onFacesDetected`.
    onFacesDetected: true,
    faceDetectorEnabled: true,
    barCodeScannerEnabled: true,
  },
});

View File

@ -1,3 +1,5 @@
import Camera from './Camera';
export RNCamera from './RNCamera';
export FaceDetector from './FaceDetector';
export default Camera;