feat(android): add code for face detection

Malte Peters 2018-02-13 17:31:34 +01:00
parent 917692c16e
commit 32c5385bc3
7 changed files with 768 additions and 617 deletions

View File

@ -2,7 +2,7 @@ package org.reactnative.camera;
import android.graphics.Bitmap;
import android.os.Build;
import android.support.media.ExifInterface;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
@ -13,225 +13,240 @@ import com.facebook.react.bridge.WritableArray;
import com.facebook.react.uimanager.NativeViewHierarchyManager;
import com.facebook.react.uimanager.UIBlock;
import com.facebook.react.uimanager.UIManagerModule;
import com.facebook.react.uimanager.ViewProps;
import com.facebook.react.views.scroll.ReactScrollViewHelper;
import com.google.android.cameraview.AspectRatio;
import com.google.zxing.BarcodeFormat;
import org.reactnative.camera.tasks.ResolveTakenPictureAsyncTask;
import org.reactnative.camera.utils.ScopedContext;
import org.reactnative.facedetector.RNFaceDetector;
import com.lwansbrough.RCTCamera.RCTCameraModule;
import java.io.File;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
public class CameraModule extends ReactContextBaseJavaModule {
private static final String TAG = "CameraModule";
private ScopedContext mScopedContext;
static final int VIDEO_2160P = 0;
static final int VIDEO_1080P = 1;
static final int VIDEO_720P = 2;
static final int VIDEO_480P = 3;
static final int VIDEO_4x3 = 4;
public static final Map<String, Object> VALID_BARCODE_TYPES =
Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("aztec", BarcodeFormat.AZTEC.toString());
put("ean13", BarcodeFormat.EAN_13.toString());
put("ean8", BarcodeFormat.EAN_8.toString());
put("qr", BarcodeFormat.QR_CODE.toString());
put("pdf417", BarcodeFormat.PDF_417.toString());
put("upc_e", BarcodeFormat.UPC_E.toString());
put("datamatrix", BarcodeFormat.DATA_MATRIX.toString());
put("code39", BarcodeFormat.CODE_39.toString());
put("code93", BarcodeFormat.CODE_93.toString());
put("interleaved2of5", BarcodeFormat.ITF.toString());
put("codabar", BarcodeFormat.CODABAR.toString());
put("code128", BarcodeFormat.CODE_128.toString());
put("maxicode", BarcodeFormat.MAXICODE.toString());
put("rss14", BarcodeFormat.RSS_14.toString());
put("rssexpanded", BarcodeFormat.RSS_EXPANDED.toString());
put("upc_a", BarcodeFormat.UPC_A.toString());
put("upc_ean", BarcodeFormat.UPC_EAN_EXTENSION.toString());
}
});
public CameraModule(ReactApplicationContext reactContext) {
super(reactContext);
mScopedContext = new ScopedContext(reactContext);
}
public ScopedContext getScopedContext() {
return mScopedContext;
}
@Override
public String getName() {
return "RNCameraModule";
}
@Nullable
@Override
public Map<String, Object> getConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("Type", getTypeConstants());
put("FlashMode", getFlashModeConstants());
put("AutoFocus", getAutoFocusConstants());
put("WhiteBalance", getWhiteBalanceConstants());
put("VideoQuality", getVideoQualityConstants());
put("BarCodeType", getBarCodeConstants());
put("FaceDetection", Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("Mode", getFaceDetectionModeConstants());
put("Landmarks", getFaceDetectionLandmarksConstants());
put("Classifications", getFaceDetectionClassificationsConstants());
}
private Map<String, Object> getFaceDetectionModeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("fast", RNFaceDetector.FAST_MODE);
put("accurate", RNFaceDetector.ACCURATE_MODE);
}
});
}
private Map<String, Object> getFaceDetectionClassificationsConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("all", RNFaceDetector.ALL_CLASSIFICATIONS);
put("none", RNFaceDetector.NO_CLASSIFICATIONS);
}
});
}
private Map<String, Object> getFaceDetectionLandmarksConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("all", RNFaceDetector.ALL_LANDMARKS);
put("none", RNFaceDetector.NO_LANDMARKS);
}
});
}
}));
}
private Map<String, Object> getTypeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("front", Constants.FACING_FRONT);
put("back", Constants.FACING_BACK);
}
});
}
private Map<String, Object> getFlashModeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("off", Constants.FLASH_OFF);
put("on", Constants.FLASH_ON);
put("auto", Constants.FLASH_AUTO);
put("torch", Constants.FLASH_TORCH);
}
});
}
private Map<String, Object> getAutoFocusConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("on", true);
put("off", false);
}
});
}
private Map<String, Object> getWhiteBalanceConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("auto", Constants.WB_AUTO);
put("cloudy", Constants.WB_CLOUDY);
put("sunny", Constants.WB_SUNNY);
put("shadow", Constants.WB_SHADOW);
put("fluorescent", Constants.WB_FLUORESCENT);
put("incandescent", Constants.WB_INCANDESCENT);
}
});
}
private Map<String, Object> getVideoQualityConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("2160p", VIDEO_2160P);
put("1080p", VIDEO_1080P);
put("720p", VIDEO_720P);
put("480p", VIDEO_480P);
put("4:3", VIDEO_4x3);
}
});
}
private Map<String, Object> getBarCodeConstants() {
return VALID_BARCODE_TYPES;
}
});
}
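// Note (illustrative, not part of this diff): as with any ReactContextBaseJavaModule,
// the map returned by getConstants() is exposed to JavaScript on NativeModules.RNCameraModule.
// The FaceDetection group added here surfaces alongside the existing ones, roughly as
// { Type: {...}, FlashMode: {...}, ..., FaceDetection: { Mode: { fast: ..., accurate: ... },
//   Landmarks: { all: ..., none: ... }, Classifications: { all: ..., none: ... } } }.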
@ReactMethod
public void takePicture(final ReadableMap options, final int viewTag, final Promise promise) {
final ReactApplicationContext context = getReactApplicationContext();
final File cacheDirectory = mScopedContext.getCacheDirectory();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
RNCameraView cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
try {
if (Build.FINGERPRINT.contains("generic")) {
Bitmap image = RNCameraViewHelper.generateSimulatorPhoto(cameraView.getWidth(), cameraView.getHeight());
ByteBuffer byteBuffer = ByteBuffer.allocate(image.getRowBytes() * image.getHeight());
image.copyPixelsToBuffer(byteBuffer);
new ResolveTakenPictureAsyncTask(byteBuffer.array(), promise, options).execute();
} else if (cameraView.isCameraOpened()) {
cameraView.takePicture(options, promise, cacheDirectory);
} else {
promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
}
} catch (Exception e) {
promise.reject("E_CAMERA_BAD_VIEWTAG", "takePictureAsync: Expected a Camera component");
}
}
});
}
@ReactMethod
public void record(final ReadableMap options, final int viewTag, final Promise promise) {
final ReactApplicationContext context = getReactApplicationContext();
final File cacheDirectory = mScopedContext.getCacheDirectory();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
final RNCameraView cameraView;
try {
cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
if (cameraView.isCameraOpened()) {
cameraView.record(options, promise, cacheDirectory);
} else {
promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
}
} catch (Exception e) {
promise.reject("E_CAMERA_BAD_VIEWTAG", "recordAsync: Expected a Camera component");
}
}
});
@ -239,15 +254,10 @@ public class CameraModule extends ReactContextBaseJavaModule {
@ReactMethod
public void stopRecording(final int viewTag) {
final ReactApplicationContext context = getReactApplicationContext();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
final RNCameraView cameraView;
try {
cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
if (cameraView.isCameraOpened()) {
cameraView.stopRecording();
}
@ -260,28 +270,23 @@ public class CameraModule extends ReactContextBaseJavaModule {
@ReactMethod
public void getSupportedRatios(final int viewTag, final Promise promise) {
final ReactApplicationContext context = getReactApplicationContext();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
final RNCameraView cameraView;
try {
cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
WritableArray result = Arguments.createArray();
if (cameraView.isCameraOpened()) {
Set<AspectRatio> ratios = cameraView.getSupportedAspectRatios();
for (AspectRatio ratio : ratios) {
result.pushString(ratio.toString());
}
promise.resolve(result);
} else {
promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
return;
}
promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
}

View File

@ -1,15 +1,12 @@
package org.reactnative.camera;
import android.Manifest;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.media.CamcorderProfile;
import android.os.Build;
import android.os.Build.VERSION;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import android.support.v4.view.ViewCompat;
import android.util.SparseArray;
import android.view.View;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.LifecycleEventListener;
import com.facebook.react.bridge.Promise;
@ -17,24 +14,11 @@ import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.ThemedReactContext;
import com.google.android.cameraview.CameraView;
import com.google.android.gms.vision.face.Face;
import com.google.android.cameraview.CameraView.Callback;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.DecodeHintType;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.Result;
import org.reactnative.camera.tasks.BarCodeScannerAsyncTask;
import org.reactnative.camera.tasks.BarCodeScannerAsyncTaskDelegate;
import org.reactnative.camera.tasks.FaceDetectorAsyncTask;
import org.reactnative.camera.tasks.FaceDetectorAsyncTaskDelegate;
import org.reactnative.camera.tasks.OpenCVProcessorAsyncTask;
import org.reactnative.camera.tasks.OpenCVProcessorAsyncTaskDelegate;
import org.reactnative.camera.tasks.ResolveTakenPictureAsyncTask;
import org.reactnative.camera.utils.ImageDimensions;
import org.reactnative.camera.utils.RNFileUtils;
import org.reactnative.facedetector.RNFaceDetector;
import org.reactnative.opencv.OpenCVProcessor;
import java.io.File;
import java.io.IOException;
import java.util.EnumMap;
@ -44,150 +28,140 @@ import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
public class RNCameraView extends CameraView implements LifecycleEventListener, BarCodeScannerAsyncTaskDelegate, OpenCVProcessorAsyncTaskDelegate {
private ThemedReactContext mThemedReactContext;
private Queue<Promise> mPictureTakenPromises = new ConcurrentLinkedQueue<>();
private Map<Promise, ReadableMap> mPictureTakenOptions = new ConcurrentHashMap<>();
private Map<Promise, File> mPictureTakenDirectories = new ConcurrentHashMap<>();
private Promise mVideoRecordedPromise;
private List<String> mBarCodeTypes = null;
private boolean mIsPaused = false;
private boolean mIsNew = true;
// Concurrency lock for scanners to avoid flooding the runtime
public volatile boolean barCodeScannerTaskLock = false;
public volatile boolean faceDetectorTaskLock = false;
// Scanning-related properties
private final MultiFormatReader mMultiFormatReader = new MultiFormatReader();
private final RNFaceDetector mFaceDetector;
private final OpenCVProcessor openCVProcessor;
private boolean mShouldDetectFaces = false;
private boolean mShouldScanBarCodes = false;
private int mFaceDetectorMode = RNFaceDetector.FAST_MODE;
private int mFaceDetectionLandmarks = RNFaceDetector.NO_LANDMARKS;
private int mFaceDetectionClassifications = RNFaceDetector.NO_CLASSIFICATIONS;
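// Sketch (not part of this diff) of how the two volatile task locks above are meant to be
// used for each preview frame, mirroring the onFramePreview callback below: the camera
// thread claims the lock before spawning a background task and skips frames while it is
// held; the corresponding *TaskCompleted() callback releases it.
//
//   if (mShouldScanBarCodes && !barCodeScannerTaskLock) {
//     barCodeScannerTaskLock = true;                       // claim on the camera thread
//     new BarCodeScannerAsyncTask(delegate, mMultiFormatReader, data, width, height).execute();
//   }
//   // ...later, onBarCodeScanningTaskCompleted() sets barCodeScannerTaskLock = false.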
public RNCameraView(ThemedReactContext themedReactContext) {
super(themedReactContext);
initBarcodeReader();
mThemedReactContext = themedReactContext;
mFaceDetector = new RNFaceDetector(themedReactContext);
openCVProcessor = new OpenCVProcessor(themedReactContext);
setupFaceDetector();
themedReactContext.addLifecycleEventListener(this);
addCallback(new Callback() {
@Override
public void onCameraOpened(CameraView cameraView) {
RNCameraViewHelper.emitCameraReadyEvent(cameraView);
}
@Override
public void onMountError(CameraView cameraView) {
RNCameraViewHelper.emitMountErrorEvent(cameraView);
}
@Override
public void onPictureTaken(CameraView cameraView, final byte[] data) {
Promise promise = mPictureTakenPromises.poll();
ReadableMap options = mPictureTakenOptions.remove(promise);
final File cacheDirectory = mPictureTakenDirectories.remove(promise);
new ResolveTakenPictureAsyncTask(data, promise, options, cacheDirectory).execute();
}
@Override
public void onVideoRecorded(CameraView cameraView, String path) {
if (mVideoRecordedPromise != null) {
if (path != null) {
WritableMap result = Arguments.createMap();
result.putString("uri", RNFileUtils.uriFromFile(new File(path)).toString());
mVideoRecordedPromise.resolve(result);
} else {
mVideoRecordedPromise.reject("E_RECORDING", "Couldn't stop recording - there is none in progress");
}
mVideoRecordedPromise = null;
}
}
@Override
public void onFramePreview(CameraView cameraView, byte[] data, int width, int height, int rotation) {
int correctRotation = RNCameraViewHelper.getCorrectCameraRotation(rotation, getFacing());
if (mShouldScanBarCodes && !barCodeScannerTaskLock && cameraView instanceof BarCodeScannerAsyncTaskDelegate) {
barCodeScannerTaskLock = true;
BarCodeScannerAsyncTaskDelegate delegate = (BarCodeScannerAsyncTaskDelegate) cameraView;
new BarCodeScannerAsyncTask(delegate, mMultiFormatReader, data, width, height).execute();
}
// if (mShouldDetectFaces && !faceDetectorTaskLock && cameraView instanceof FaceDetectorAsyncTaskDelegate) {
// faceDetectorTaskLock = true;
// FaceDetectorAsyncTaskDelegate delegate = (FaceDetectorAsyncTaskDelegate) cameraView;
// new FaceDetectorAsyncTask(delegate, mFaceDetector, data, width, height, correctRotation).execute();
// }
{
OpenCVProcessorAsyncTaskDelegate delegate = (OpenCVProcessorAsyncTaskDelegate) cameraView;
new OpenCVProcessorAsyncTask(delegate, openCVProcessor, data, width, height, correctRotation).execute();
}
}
});
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
View preview = getView();
if (null == preview) {
return;
}
this.setBackgroundColor(Color.BLACK);
int width = right - left;
int height = bottom - top;
preview.layout(0, 0, width, height);
}
@Override
public void requestLayout() {
// React handles this for us, so we don't need to call super.requestLayout();
}
@Override
public void onViewAdded(View child) {
if (this.getView() == child || this.getView() == null) return;
// remove and readd view to make sure it is in the back.
// @TODO figure out why there was a z order issue in the first place and fix accordingly.
this.removeView(this.getView());
this.addView(this.getView(), 0);
}
public void setBarCodeTypes(List<String> barCodeTypes) {
mBarCodeTypes = barCodeTypes;
initBarcodeReader();
}
public void takePicture(ReadableMap options, final Promise promise, File cacheDirectory) {
mPictureTakenPromises.add(promise);
mPictureTakenOptions.put(promise, options);
mPictureTakenDirectories.put(promise, cacheDirectory);
super.takePicture();
}
public void record(ReadableMap options, final Promise promise, File cacheDirectory) {
try {
String path = RNFileUtils.getOutputFilePath(cacheDirectory, ".mp4");
int maxDuration = options.hasKey("maxDuration") ? options.getInt("maxDuration") : -1;
int maxFileSize = options.hasKey("maxFileSize") ? options.getInt("maxFileSize") : -1;
CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
if (options.hasKey("quality")) {
profile = RNCameraViewHelper.getCamcorderProfile(options.getInt("quality"));
}
boolean recordAudio = !options.hasKey("mute");
if (super.record(path, maxDuration * 1000, maxFileSize, recordAudio, profile)) {
mVideoRecordedPromise = promise;
} else {
promise.reject("E_RECORDING_FAILED", "Starting video recording failed. Another recording might be in progress.");
}
@ -196,160 +170,120 @@ public class RNCameraView extends CameraView implements LifecycleEventListener,
}
}
/**
* Initialize the barcode decoder.
* Supports all iOS codes except [code138, code39mod43, itf14]
* Additionally supports [codabar, code128, maxicode, rss14, rssexpanded, upc_a, upc_ean]
*/
private void initBarcodeReader() {
EnumMap<DecodeHintType, Object> hints = new EnumMap<>(DecodeHintType.class);
EnumSet<BarcodeFormat> decodeFormats = EnumSet.noneOf(BarcodeFormat.class);
if (mBarCodeTypes != null) {
for (String code : mBarCodeTypes) {
String formatString = (String) CameraModule.VALID_BARCODE_TYPES.get(code);
if (formatString != null) {
decodeFormats.add(BarcodeFormat.valueOf(formatString));
}
}
}
hints.put(DecodeHintType.POSSIBLE_FORMATS, decodeFormats);
mMultiFormatReader.setHints(hints);
}
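// Illustrative sketch (not part of this diff): how the POSSIBLE_FORMATS hint installed above
// constrains decoding of one YUV preview frame. The classes are from the ZXing core library
// (not all of them are imported in this file); the frame geometry is an assumption.
//
//   PlanarYUVLuminanceSource source =
//       new PlanarYUVLuminanceSource(data, width, height, 0, 0, width, height, false);
//   BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
//   try {
//     Result result = mMultiFormatReader.decodeWithState(bitmap);  // only allowed formats match
//     onBarCodeRead(result);
//   } catch (NotFoundException e) {
//     // no barcode of an allowed format in this frame
//   } finally {
//     mMultiFormatReader.reset();
//   }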
public void setShouldScanBarCodes(boolean shouldScanBarCodes) {
this.mShouldScanBarCodes = shouldScanBarCodes;
setScanning(mShouldDetectFaces || mShouldScanBarCodes);
}
public void onBarCodeRead(Result barCode) {
String barCodeType = barCode.getBarcodeFormat().toString();
if (!mShouldScanBarCodes || !mBarCodeTypes.contains(barCodeType)) {
return;
}
RNCameraViewHelper.emitBarCodeReadEvent(this, barCode);
}
public void onBarCodeScanningTaskCompleted() {
barCodeScannerTaskLock = false;
mMultiFormatReader.reset();
}
/**
* Initial setup of the face detector
*/
private void setupFaceDetector() {
mFaceDetector.setMode(mFaceDetectorMode);
mFaceDetector.setLandmarkType(mFaceDetectionLandmarks);
mFaceDetector.setClassificationType(mFaceDetectionClassifications);
mFaceDetector.setTracking(true);
}
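// Assumption (not part of this diff): RNFaceDetector appears to wrap
// com.google.android.gms.vision.face.FaceDetector (its Face class is imported above), so the
// setters used in setupFaceDetector() roughly correspond to a Mobile Vision builder like:
//
//   FaceDetector detector = new FaceDetector.Builder(context)
//       .setMode(FaceDetector.FAST_MODE)
//       .setLandmarkType(FaceDetector.NO_LANDMARKS)
//       .setClassificationType(FaceDetector.NO_CLASSIFICATIONS)
//       .setTrackingEnabled(true)
//       .build();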
public void setFaceDetectionLandmarks(int landmarks) {
mFaceDetectionLandmarks = landmarks;
if (mFaceDetector != null) {
mFaceDetector.setLandmarkType(landmarks);
}
}
public void setFaceDetectionClassifications(int classifications) {
mFaceDetectionClassifications = classifications;
if (mFaceDetector != null) {
mFaceDetector.setClassificationType(classifications);
}
}
public void setFaceDetectionMode(int mode) {
mFaceDetectorMode = mode;
if (mFaceDetector != null) {
mFaceDetector.setMode(mode);
}
}
public void setShouldDetectFaces(boolean shouldDetectFaces) {
this.mShouldDetectFaces = shouldDetectFaces;
setScanning(mShouldDetectFaces || mShouldScanBarCodes);
}
// public void onFacesDetected(SparseArray<Face> facesReported, int sourceWidth, int sourceHeight, int sourceRotation) {
// if (!mShouldDetectFaces) {
// return;
// }
//
// SparseArray<Face> facesDetected = facesReported == null ? new SparseArray<Face>() : facesReported;
//
// ImageDimensions dimensions = new ImageDimensions(sourceWidth, sourceHeight, sourceRotation, getFacing());
// RNCameraViewHelper.emitFacesDetectedEvent(this, facesDetected, dimensions);
// }
public void onFacesDetected(SparseArray<Map<String, Float>> facesReported, int sourceWidth, int sourceHeight, int sourceRotation) {
if (!mShouldDetectFaces) {
return;
}
SparseArray<Map<String, Float>> facesDetected = facesReported == null ? new SparseArray<Map<String, Float>>() : facesReported;
ImageDimensions dimensions = new ImageDimensions(sourceWidth, sourceHeight, sourceRotation, getFacing());
RNCameraViewHelper.emitFacesDetectedEvent(this, facesDetected, dimensions);
}
@Override
public void onFaceDetectionError(OpenCVProcessor openCVProcessor) {
// RNCameraViewHelper.emitFaceDetectionErrorEvent(this, openCVProcessor);
}
public void onFaceDetectionError(RNFaceDetector faceDetector) {
if (!mShouldDetectFaces) {
return;
}
RNCameraViewHelper.emitFaceDetectionErrorEvent(this, faceDetector);
}
@Override
public void onFaceDetectingTaskCompleted() {
faceDetectorTaskLock = false;
}
@Override
public void onHostResume() {
if (hasCameraPermissions()) {
if ((mIsPaused && !isCameraOpened()) || mIsNew) {
mIsPaused = false;
mIsNew = false;
if (!Build.FINGERPRINT.contains("generic")) {
start();
}
}
} else {
WritableMap error = Arguments.createMap();
error.putString("message", "Camera permissions not granted - component could not be rendered.");
RNCameraViewHelper.emitMountErrorEvent(this);
}
}
@Override
public void onHostPause() {
if (!mIsPaused && isCameraOpened()) {
mIsPaused = true;
stop();
}
}
@Override
public void onHostDestroy() {
mFaceDetector.release();
stop();
}
private boolean hasCameraPermissions() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
int result = ContextCompat.checkSelfPermission(getContext(), Manifest.permission.CAMERA);
return result == PackageManager.PERMISSION_GRANTED;
} else {
return true;
}
}
}

View File

@ -1,23 +1,27 @@
package org.reactnative.camera;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.media.CamcorderProfile;
import android.os.Build;
import android.os.Build.VERSION;
import android.support.media.ExifInterface;
import android.support.v4.view.InputDeviceCompat;
import android.support.v4.view.ViewCompat;
import android.util.SparseArray;
import android.view.ViewGroup;
import com.drew.metadata.exif.makernotes.OlympusMakernoteDirectory;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.UIManagerModule;
import com.google.android.cameraview.CameraView;
import com.google.android.gms.vision.face.Face;
import com.google.zxing.Result;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Locale;
import java.util.Map;
import org.opencv.imgproc.Imgproc;
import org.reactnative.camera.events.BarCodeReadEvent;
import org.reactnative.camera.events.CameraMountErrorEvent;
import org.reactnative.camera.events.CameraReadyEvent;
@ -26,230 +30,192 @@ import org.reactnative.camera.events.FacesDetectedEvent;
import org.reactnative.camera.utils.ImageDimensions;
import org.reactnative.facedetector.RNFaceDetector;
public class RNCameraViewHelper {
public static final String[][] exifTags = new String[][]{
{"string", ExifInterface.TAG_ARTIST},
{"int", ExifInterface.TAG_BITS_PER_SAMPLE},
{"int", ExifInterface.TAG_COMPRESSION},
{"string", ExifInterface.TAG_COPYRIGHT},
{"string", ExifInterface.TAG_DATETIME},
{"string", ExifInterface.TAG_IMAGE_DESCRIPTION},
{"int", ExifInterface.TAG_IMAGE_LENGTH},
{"int", ExifInterface.TAG_IMAGE_WIDTH},
{"int", ExifInterface.TAG_JPEG_INTERCHANGE_FORMAT},
{"int", ExifInterface.TAG_JPEG_INTERCHANGE_FORMAT_LENGTH},
{"string", ExifInterface.TAG_MAKE},
{"string", ExifInterface.TAG_MODEL},
{"int", ExifInterface.TAG_ORIENTATION},
{"int", ExifInterface.TAG_PHOTOMETRIC_INTERPRETATION},
{"int", ExifInterface.TAG_PLANAR_CONFIGURATION},
{"double", ExifInterface.TAG_PRIMARY_CHROMATICITIES},
{"double", ExifInterface.TAG_REFERENCE_BLACK_WHITE},
{"int", ExifInterface.TAG_RESOLUTION_UNIT},
{"int", ExifInterface.TAG_ROWS_PER_STRIP},
{"int", ExifInterface.TAG_SAMPLES_PER_PIXEL},
{"string", ExifInterface.TAG_SOFTWARE},
{"int", ExifInterface.TAG_STRIP_BYTE_COUNTS},
{"int", ExifInterface.TAG_STRIP_OFFSETS},
{"int", ExifInterface.TAG_TRANSFER_FUNCTION},
{"double", ExifInterface.TAG_WHITE_POINT},
{"double", ExifInterface.TAG_X_RESOLUTION},
{"double", ExifInterface.TAG_Y_CB_CR_COEFFICIENTS},
{"int", ExifInterface.TAG_Y_CB_CR_POSITIONING},
{"int", ExifInterface.TAG_Y_CB_CR_SUB_SAMPLING},
{"double", ExifInterface.TAG_Y_RESOLUTION},
{"double", ExifInterface.TAG_APERTURE_VALUE},
{"double", ExifInterface.TAG_BRIGHTNESS_VALUE},
{"string", ExifInterface.TAG_CFA_PATTERN},
{"int", ExifInterface.TAG_COLOR_SPACE},
{"string", ExifInterface.TAG_COMPONENTS_CONFIGURATION},
{"double", ExifInterface.TAG_COMPRESSED_BITS_PER_PIXEL},
{"int", ExifInterface.TAG_CONTRAST},
{"int", ExifInterface.TAG_CUSTOM_RENDERED},
{"string", ExifInterface.TAG_DATETIME_DIGITIZED},
{"string", ExifInterface.TAG_DATETIME_ORIGINAL},
{"string", ExifInterface.TAG_DEVICE_SETTING_DESCRIPTION},
{"double", ExifInterface.TAG_DIGITAL_ZOOM_RATIO},
{"string", ExifInterface.TAG_EXIF_VERSION},
{"double", ExifInterface.TAG_EXPOSURE_BIAS_VALUE},
{"double", ExifInterface.TAG_EXPOSURE_INDEX},
{"int", ExifInterface.TAG_EXPOSURE_MODE},
{"int", ExifInterface.TAG_EXPOSURE_PROGRAM},
{"double", ExifInterface.TAG_EXPOSURE_TIME},
{"double", ExifInterface.TAG_F_NUMBER},
{"string", ExifInterface.TAG_FILE_SOURCE},
{"int", ExifInterface.TAG_FLASH},
{"double", ExifInterface.TAG_FLASH_ENERGY},
{"string", ExifInterface.TAG_FLASHPIX_VERSION},
{"double", ExifInterface.TAG_FOCAL_LENGTH},
{"int", ExifInterface.TAG_FOCAL_LENGTH_IN_35MM_FILM},
{"int", ExifInterface.TAG_FOCAL_PLANE_RESOLUTION_UNIT},
{"double", ExifInterface.TAG_FOCAL_PLANE_X_RESOLUTION},
{"double", ExifInterface.TAG_FOCAL_PLANE_Y_RESOLUTION},
{"int", ExifInterface.TAG_GAIN_CONTROL},
{"int", ExifInterface.TAG_ISO_SPEED_RATINGS},
{"string", ExifInterface.TAG_IMAGE_UNIQUE_ID},
{"int", ExifInterface.TAG_LIGHT_SOURCE},
{"string", ExifInterface.TAG_MAKER_NOTE},
{"double", ExifInterface.TAG_MAX_APERTURE_VALUE},
{"int", ExifInterface.TAG_METERING_MODE},
{"int", ExifInterface.TAG_NEW_SUBFILE_TYPE},
{"string", ExifInterface.TAG_OECF},
{"int", ExifInterface.TAG_PIXEL_X_DIMENSION},
{"int", ExifInterface.TAG_PIXEL_Y_DIMENSION},
{"string", ExifInterface.TAG_RELATED_SOUND_FILE},
{"int", ExifInterface.TAG_SATURATION},
{"int", ExifInterface.TAG_SCENE_CAPTURE_TYPE},
{"string", ExifInterface.TAG_SCENE_TYPE},
{"int", ExifInterface.TAG_SENSING_METHOD},
{"int", ExifInterface.TAG_SHARPNESS},
{"double", ExifInterface.TAG_SHUTTER_SPEED_VALUE},
{"string", ExifInterface.TAG_SPATIAL_FREQUENCY_RESPONSE},
{"string", ExifInterface.TAG_SPECTRAL_SENSITIVITY},
{"int", ExifInterface.TAG_SUBFILE_TYPE},
{"string", ExifInterface.TAG_SUBSEC_TIME},
{"string", ExifInterface.TAG_SUBSEC_TIME_DIGITIZED},
{"string", ExifInterface.TAG_SUBSEC_TIME_ORIGINAL},
{"int", ExifInterface.TAG_SUBJECT_AREA},
{"double", ExifInterface.TAG_SUBJECT_DISTANCE},
{"int", ExifInterface.TAG_SUBJECT_DISTANCE_RANGE},
{"int", ExifInterface.TAG_SUBJECT_LOCATION},
{"string", ExifInterface.TAG_USER_COMMENT},
{"int", ExifInterface.TAG_WHITE_BALANCE},
{"int", ExifInterface.TAG_GPS_ALTITUDE_REF},
{"string", ExifInterface.TAG_GPS_AREA_INFORMATION},
{"double", ExifInterface.TAG_GPS_DOP},
{"string", ExifInterface.TAG_GPS_DATESTAMP},
{"double", ExifInterface.TAG_GPS_DEST_BEARING},
{"string", ExifInterface.TAG_GPS_DEST_BEARING_REF},
{"double", ExifInterface.TAG_GPS_DEST_DISTANCE},
{"string", ExifInterface.TAG_GPS_DEST_DISTANCE_REF},
{"double", ExifInterface.TAG_GPS_DEST_LATITUDE},
{"string", ExifInterface.TAG_GPS_DEST_LATITUDE_REF},
{"double", ExifInterface.TAG_GPS_DEST_LONGITUDE},
{"string", ExifInterface.TAG_GPS_DEST_LONGITUDE_REF},
{"int", ExifInterface.TAG_GPS_DIFFERENTIAL},
{"double", ExifInterface.TAG_GPS_IMG_DIRECTION},
{"string", ExifInterface.TAG_GPS_IMG_DIRECTION_REF},
{"string", ExifInterface.TAG_GPS_LATITUDE_REF},
{"string", ExifInterface.TAG_GPS_LONGITUDE_REF},
{"string", ExifInterface.TAG_GPS_MAP_DATUM},
{"string", ExifInterface.TAG_GPS_MEASURE_MODE},
{"string", ExifInterface.TAG_GPS_PROCESSING_METHOD},
{"string", ExifInterface.TAG_GPS_SATELLITES},
{"double", ExifInterface.TAG_GPS_SPEED},
{"string", ExifInterface.TAG_GPS_SPEED_REF},
{"string", ExifInterface.TAG_GPS_STATUS},
{"string", ExifInterface.TAG_GPS_TIMESTAMP},
{"double", ExifInterface.TAG_GPS_TRACK},
{"string", ExifInterface.TAG_GPS_TRACK_REF},
{"string", ExifInterface.TAG_GPS_VERSION_ID},
{"string", ExifInterface.TAG_INTEROPERABILITY_INDEX},
{"int", ExifInterface.TAG_THUMBNAIL_IMAGE_LENGTH},
{"int", ExifInterface.TAG_THUMBNAIL_IMAGE_WIDTH},
{"int", ExifInterface.TAG_DNG_VERSION},
{"int", ExifInterface.TAG_DEFAULT_CROP_SIZE},
{"int", ExifInterface.TAG_ORF_PREVIEW_IMAGE_START},
{"int", ExifInterface.TAG_ORF_PREVIEW_IMAGE_LENGTH},
{"int", ExifInterface.TAG_ORF_ASPECT_FRAME},
{"int", ExifInterface.TAG_RW2_SENSOR_BOTTOM_BORDER},
{"int", ExifInterface.TAG_RW2_SENSOR_LEFT_BORDER},
{"int", ExifInterface.TAG_RW2_SENSOR_RIGHT_BORDER},
{"int", ExifInterface.TAG_RW2_SENSOR_TOP_BORDER},
{"int", ExifInterface.TAG_RW2_ISO},
};
// Mount error event
public static void emitMountErrorEvent(ViewGroup view) {
CameraMountErrorEvent event = CameraMountErrorEvent.obtain(view.getId());
ReactContext reactContext = (ReactContext) view.getContext();
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
// Camera ready event
public static void emitCameraReadyEvent(ViewGroup view) {
CameraReadyEvent event = CameraReadyEvent.obtain(view.getId());
ReactContext reactContext = (ReactContext) view.getContext();
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
// Face detection events
public static void emitFacesDetectedEvent(
ViewGroup view,
SparseArray<Map<String, Float>> faces,
ImageDimensions dimensions
) {
float density = view.getResources().getDisplayMetrics().density;
double scaleX = (double) view.getWidth() / (dimensions.getWidth() * density);
double scaleY = (double) view.getHeight() / (dimensions.getHeight() * density);
FacesDetectedEvent event = FacesDetectedEvent.obtain(
view.getId(),
faces,
dimensions,
scaleX,
scaleY
);
ReactContext reactContext = (ReactContext) view.getContext();
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
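// Worked example with illustrative numbers: for a view 1080 px wide on a density-3.0 screen
// and a source image 480 px wide, scaleX = 1080 / (480 * 3.0) = 0.75, so face coordinates
// reported in source-image pixels are multiplied by 0.75 before being drawn over the view.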
public static void emitFaceDetectionErrorEvent(ViewGroup view, RNFaceDetector faceDetector) {
FaceDetectionErrorEvent event = FaceDetectionErrorEvent.obtain(view.getId(), faceDetector);
ReactContext reactContext = (ReactContext) view.getContext();
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
// Bar code read event
public static void emitBarCodeReadEvent(ViewGroup view, Result barCode) {
BarCodeReadEvent event = BarCodeReadEvent.obtain(view.getId(), barCode);
ReactContext reactContext = (ReactContext) view.getContext();
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
// Utilities
public static int getCorrectCameraRotation(int rotation, int facing) {
if (facing == CameraView.FACING_FRONT) {
return (rotation - 90 + 360) % 360;
} else {
return (-rotation + 90 + 360) % 360;
}
}
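// Worked example: with the device unrotated (rotation = 0) the front camera maps to
// (0 - 90 + 360) % 360 = 270, while the back camera maps to (-0 + 90 + 360) % 360 = 90.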
public static CamcorderProfile getCamcorderProfile(int quality) {
CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
switch (quality) {
case CameraModule.VIDEO_2160P:
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
profile = CamcorderProfile.get(CamcorderProfile.QUALITY_2160P);
}
break;
case CameraModule.VIDEO_1080P:
profile = CamcorderProfile.get(CamcorderProfile.QUALITY_1080P);
break;
case CameraModule.VIDEO_720P:
profile = CamcorderProfile.get(CamcorderProfile.QUALITY_720P);
break;
case CameraModule.VIDEO_480P:
profile = CamcorderProfile.get(CamcorderProfile.QUALITY_480P);
break;
case CameraModule.VIDEO_4x3:
profile = CamcorderProfile.get(CamcorderProfile.QUALITY_480P);
profile.videoFrameWidth = 640;
break;
}
return profile;
}
public static WritableMap getExifData(ExifInterface exifInterface) {
@ -258,42 +224,61 @@ public class RNCameraViewHelper {
String name = tagInfo[1];
if (exifInterface.getAttribute(name) != null) {
String type = tagInfo[0];
switch (type) {
case "string":
exifMap.putString(name, exifInterface.getAttribute(name));
break;
case "int":
exifMap.putInt(name, exifInterface.getAttributeInt(name, 0));
break;
case "double":
exifMap.putDouble(name, exifInterface.getAttributeDouble(name, 0));
break;
}
}
}
double[] latLong = exifInterface.getLatLong();
if (latLong != null) {
exifMap.putDouble(ExifInterface.TAG_GPS_LATITUDE, latLong[0]);
exifMap.putDouble(ExifInterface.TAG_GPS_LONGITUDE, latLong[1]);
exifMap.putDouble(ExifInterface.TAG_GPS_ALTITUDE, exifInterface.getAltitude(0));
}
return exifMap;
}
public static Bitmap generateSimulatorPhoto(int width, int height) {
Bitmap fakePhoto = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(fakePhoto);
Paint background = new Paint();
background.setColor(Color.BLACK);
canvas.drawRect(0, 0, width, height, background);
Paint textPaint = new Paint();
textPaint.setColor(Color.YELLOW);
textPaint.setTextSize(35);
Calendar calendar = Calendar.getInstance();
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("dd.MM.YY HH:mm:ss", Locale.getDefault());
canvas.drawText(simpleDateFormat.format(calendar.getTime()), width * 0.1f, height * 0.9f, textPaint);
return fakePhoto;
}
}

@ -0,0 +1,76 @@
package org.reactnative.camera.events;
import android.support.v4.util.Pools.SynchronizedPool;
import android.util.SparseArray;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import java.util.Map;
import org.reactnative.camera.CameraViewManager.Events;
import org.reactnative.camera.utils.ImageDimensions;
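// Pooled event that carries the faces found by the OpenCV processor to JS.
// The coalescing key is the face count, so React Native may merge pending
// events that report the same number of faces for a view.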
public class OpenCVProcessorFaceDetectedEvent extends Event<OpenCVProcessorFaceDetectedEvent> {
private static final SynchronizedPool<OpenCVProcessorFaceDetectedEvent> EVENTS_POOL = new SynchronizedPool<>(3);
private SparseArray<Map<String, Float>> mFaces;
private ImageDimensions mImageDimensions;
private double mScaleX;
private double mScaleY;
private OpenCVProcessorFaceDetectedEvent() {
}
public static OpenCVProcessorFaceDetectedEvent obtain(int viewTag, SparseArray<Map<String, Float>> faces, ImageDimensions dimensions, double scaleX, double scaleY) {
OpenCVProcessorFaceDetectedEvent event = (OpenCVProcessorFaceDetectedEvent) EVENTS_POOL.acquire();
if (event == null) {
event = new OpenCVProcessorFaceDetectedEvent();
}
event.init(viewTag, faces, dimensions, scaleX, scaleY);
return event;
}
private void init(int viewTag, SparseArray<Map<String, Float>> faces, ImageDimensions dimensions, double scaleX, double scaleY) {
super.init(viewTag);
this.mFaces = faces;
this.mImageDimensions = dimensions;
this.mScaleX = scaleX;
this.mScaleY = scaleY;
}
public short getCoalescingKey() {
if (this.mFaces.size() > 32767) {
return Short.MAX_VALUE;
}
return (short) this.mFaces.size();
}
public String getEventName() {
return Events.EVENT_ON_FACES_DETECTED.toString();
}
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), serializeEventData());
}
private WritableMap serializeEventData() {
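// Payload shape: { type: "face", faces: [{ x, y, width, height, orientation }], target: viewTag }.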
WritableArray facesList = Arguments.createArray();
for (int i = 0; i < this.mFaces.size(); i++) {
Map<String, Float> face = this.mFaces.valueAt(i);
WritableMap serializedFace = Arguments.createMap();
serializedFace.putDouble("x", (double) ((Float) face.get("x")).floatValue());
serializedFace.putDouble("y", (double) ((Float) face.get("y")).floatValue());
serializedFace.putDouble("width", (double) ((Float) face.get("width")).floatValue());
serializedFace.putDouble("height", (double) ((Float) face.get("height")).floatValue());
serializedFace.putDouble(ReactVideoView.EVENT_PROP_ORIENTATION, 0.0d);
facesList.pushMap(serializedFace);
}
WritableMap event = Arguments.createMap();
event.putString("type", "face");
event.putArray("faces", facesList);
event.putInt("target", getViewTag());
return event;
}
}

@ -0,0 +1,41 @@
package org.reactnative.camera.tasks;
import android.os.AsyncTask;
import android.util.SparseArray;
import java.util.Map;
import org.reactnative.opencv.OpenCVProcessor;
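// Runs OpenCV face detection on a background thread; onPostExecute hands the
// result (or a detection error when the result is null) back to the delegate
// on the main thread.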
public class OpenCVProcessorAsyncTask extends AsyncTask<Void, Void, SparseArray<Map<String, Float>>> {
private OpenCVProcessorAsyncTaskDelegate mDelegate;
private int mHeight;
private byte[] mImageData;
private OpenCVProcessor mOpenCVProcessor;
private int mRotation;
private int mWidth;
public OpenCVProcessorAsyncTask(OpenCVProcessorAsyncTaskDelegate delegate, OpenCVProcessor openCVProcessor, byte[] imageData, int width, int height, int rotation) {
this.mImageData = imageData;
this.mWidth = width;
this.mHeight = height;
this.mRotation = rotation;
this.mDelegate = delegate;
this.mOpenCVProcessor = openCVProcessor;
}
protected SparseArray<Map<String, Float>> doInBackground(Void... ignored) {
if (isCancelled() || this.mDelegate == null || this.mOpenCVProcessor == null) {
return null;
}
return this.mOpenCVProcessor.detect(this.mImageData, this.mWidth, this.mHeight);
}
protected void onPostExecute(SparseArray<Map<String, Float>> faces) {
super.onPostExecute(faces);
if (faces == null) {
this.mDelegate.onFaceDetectionError(this.mOpenCVProcessor);
return;
}
this.mDelegate.onFacesDetected(faces, this.mWidth, this.mHeight, this.mRotation);
this.mDelegate.onFaceDetectingTaskCompleted();
}
}

@ -0,0 +1,13 @@
package org.reactnative.camera.tasks;
import android.util.SparseArray;
import java.util.Map;
import org.reactnative.opencv.OpenCVProcessor;
public interface OpenCVProcessorAsyncTaskDelegate {
void onFaceDetectingTaskCompleted();
void onFaceDetectionError(OpenCVProcessor openCVProcessor);
void onFacesDetected(SparseArray<Map<String, Float>> sparseArray, int i, int i2, int i3);
}
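// A minimal sketch (not part of this diff) of how a camera view that receives
// NV21 preview frames might drive this pipeline, assuming it implements
// OpenCVProcessorAsyncTaskDelegate, owns an OpenCVProcessor instance, and uses
// a flag to avoid queuing more than one detection task at a time. The names
// onFramePreview, faceDetectionTaskRunning and openCVProcessor are illustrative:
//
// void onFramePreview(byte[] data, int width, int height, int rotation) {
// if (!faceDetectionTaskRunning) {
// faceDetectionTaskRunning = true; // cleared again in onFaceDetectingTaskCompleted()
// new OpenCVProcessorAsyncTask(this, openCVProcessor, data, width, height, rotation).execute();
// }
// }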

@ -0,0 +1,97 @@
package org.reactnative.opencv;
import android.content.Context;
import android.util.Log;
import android.util.SparseArray;
import com.facebook.react.common.ReactConstants;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Size;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import org.reactnative.camera.R;
public class OpenCVProcessor {
private CascadeClassifier faceDetector;
private int frame = 0;
private Context reactContext;
public OpenCVProcessor(Context context) {
this.reactContext = context;
try {
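// CascadeClassifier can only load from a filesystem path, so the bundled
// cascade is copied out of the raw resources into the app's private
// "cascade" directory before the classifier is created.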
InputStream is = this.reactContext.getResources().openRawResource(R.raw.lbpcascade_frontalface_improved);
File mCascadeFile = new File(this.reactContext.getDir("cascade", Context.MODE_PRIVATE), "lbpcascade_frontalface_improved.xml");
FileOutputStream os = new FileOutputStream(mCascadeFile);
byte[] buffer = new byte[4096];
while (true) {
int bytesRead = is.read(buffer);
if (bytesRead == -1) {
break;
}
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
this.faceDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
if (this.faceDetector.empty()) {
Log.e(ReactConstants.TAG, "Failed to load cascade classifier");
this.faceDetector = null;
} else {
Log.i(ReactConstants.TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
}
} catch (IOException e) {
e.printStackTrace();
Log.e(ReactConstants.TAG, "Failed to load cascade. Exception thrown: " + e);
}
Log.d(ReactConstants.TAG, "---OpenCV Constructor---");
}
private void saveMatToDisk(Mat mat) {
Imgcodecs.imwrite(String.format("/sdcard/nect/%d.jpg", System.currentTimeMillis()), mat);
}
public SparseArray<Map<String, Float>> detect(byte[] imageData, int width, int height) {
SparseArray<Map<String, Float>> faces = new SparseArray<>();
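// Only every 10th preview frame is analyzed, which keeps the detection work cheap.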
if (this.frame % 10 == 0) {
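// An NV21 preview frame is height * 3/2 rows of single-channel bytes
// (the Y plane followed by the interleaved VU data).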
Mat mat = new Mat((height / 2) + height, width, CvType.CV_8UC1);
mat.put(0, 0, imageData);
Mat grayMat = new Mat();
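// Keep only the luma plane: NV21 -> single-channel grayscale.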
Imgproc.cvtColor(mat, grayMat, Imgproc.COLOR_YUV2GRAY_420);
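// Transpose plus flip(-1) reorients the raw landscape sensor buffer into portrait before detection.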
Core.transpose(grayMat, grayMat);
Core.flip(grayMat, grayMat, -1);
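// Work at a fixed 480 px width; imageHeight is the matching scaled height used to normalize y/height below.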
float scale = 480.0f / ((float) grayMat.cols());
float imageHeight = ((float) grayMat.rows()) * scale;
Imgproc.resize(grayMat, grayMat, new Size(), (double) scale, (double) scale, Imgproc.INTER_CUBIC);
if (this.frame == 30) {
Log.d(ReactConstants.TAG, "---SAVE IMAGE!!--- ");
saveMatToDisk(grayMat);
}
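// detectMultiScale: scale factor 1.2, at least 3 neighbors, flags 0,
// minimum face size 10x10 px in the scaled image, no maximum size.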
MatOfRect rec = new MatOfRect();
this.faceDetector.detectMultiScale(grayMat, rec, 1.2d, 3, 0, new Size(10.0d, 10.0d), new Size());
Rect[] detectedObjects = rec.toArray();
if (detectedObjects.length > 0) {
Log.d(ReactConstants.TAG, "---FOUND FACE!!--- ");
}
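// Face rectangles are reported normalized to the working image:
// x and width relative to the 480 px width, y and height relative to the scaled height.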
for (int i = 0; i < detectedObjects.length; i++) {
Map<String, Float> face = new HashMap<>();
face.put("x", Float.valueOf(((float) detectedObjects[i].f33x) / 480.0f));
face.put("y", Float.valueOf(((float) detectedObjects[i].f34y) / imageHeight));
face.put("width", Float.valueOf(((float) detectedObjects[i].width) / 480.0f));
face.put("height", Float.valueOf(((float) detectedObjects[i].height) / imageHeight));
faces.append(i, face);
}
}
this.frame++;
return faces;
}
}