Implement get view from reactTag on Android and iOS.

Do not save app context on CameraModule
This commit is contained in:
Joao Fidelis 2018-01-24 16:11:50 -02:00
parent 0b5ae8f9e3
commit a09436e9ab
5 changed files with 152 additions and 130 deletions

View File

@ -1,9 +1,8 @@
package org.reactnative.camera;
import android.content.Context;
import android.graphics.Bitmap;
import android.os.Build;
import org.reactnative.camera.utils.ScopedContext;
import org.reactnative.facedetector.RNFaceDetector;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
@ -11,9 +10,18 @@ import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.uimanager.NativeViewHierarchyManager;
import com.facebook.react.uimanager.UIBlock;
import com.facebook.react.uimanager.UIManagerModule;
import com.google.android.cameraview.AspectRatio;
import com.google.zxing.BarcodeFormat;
import org.reactnative.camera.tasks.ResolveTakenPictureAsyncTask;
import org.reactnative.camera.utils.ScopedContext;
import org.reactnative.facedetector.RNFaceDetector;
import java.io.File;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@ -24,9 +32,7 @@ import javax.annotation.Nullable;
public class CameraModule extends ReactContextBaseJavaModule {
private static final String TAG = "CameraModule";
private static ReactApplicationContext mReactContext;
private static ScopedContext mScopedContext;
private ScopedContext mScopedContext;
static final int VIDEO_2160P = 0;
static final int VIDEO_1080P = 1;
static final int VIDEO_720P = 2;
@ -58,15 +64,10 @@ public class CameraModule extends ReactContextBaseJavaModule {
public CameraModule(ReactApplicationContext reactContext) {
super(reactContext);
mReactContext = reactContext;
mScopedContext = new ScopedContext(mReactContext);
mScopedContext = new ScopedContext(reactContext);
}
public static ReactApplicationContext getReactContextSingleton() {
return mReactContext;
}
public static ScopedContext getScopedContext() {
public ScopedContext getScopedContext() {
return mScopedContext;
}
@ -183,31 +184,104 @@ public class CameraModule extends ReactContextBaseJavaModule {
}
@ReactMethod
public void takePicture(ReadableMap options, final Promise promise) {
CameraViewManager.getInstance().takePicture(options, promise);
}
@ReactMethod
public void record(ReadableMap options, final Promise promise) {
CameraViewManager.getInstance().record(options, promise);
}
@ReactMethod
public void stopRecording() {
CameraViewManager.getInstance().stopRecording();
}
@ReactMethod
public void getSupportedRatios(final Promise promise) {
WritableArray result = Arguments.createArray();
Set<AspectRatio> ratios = CameraViewManager.getInstance().getSupportedRatios();
if (ratios != null) {
for (AspectRatio ratio : ratios) {
result.pushString(ratio.toString());
public void takePicture(final ReadableMap options, final int viewTag, final Promise promise) {
final ReactApplicationContext context = getReactApplicationContext();
final File cacheDirectory = mScopedContext.getCacheDirectory();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
RNCameraView cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
try {
if (!Build.FINGERPRINT.contains("generic")) {
if (cameraView.isCameraOpened()) {
cameraView.takePicture(options, promise, cacheDirectory);
} else {
promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
}
} else {
Bitmap image = RNCameraViewHelper.generateSimulatorPhoto(cameraView.getWidth(), cameraView.getHeight());
ByteBuffer byteBuffer = ByteBuffer.allocate(image.getRowBytes() * image.getHeight());
image.copyPixelsToBuffer(byteBuffer);
new ResolveTakenPictureAsyncTask(byteBuffer.array(), promise, options).execute();
}
} catch (Exception e) {
promise.reject("E_CAMERA_BAD_VIEWTAG", "takePictureAsync: Expected a Camera component");
}
}
promise.resolve(result);
} else {
promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
}
});
}
@ReactMethod
public void record(final ReadableMap options, final int viewTag, final Promise promise) {
  // Recording writes its output file into the module's scoped cache directory.
  final File cacheDirectory = mScopedContext.getCacheDirectory();
  final ReactApplicationContext reactContext = getReactApplicationContext();
  UIManagerModule uiManager = reactContext.getNativeModule(UIManagerModule.class);
  // Resolve the camera view on the UI thread via the view hierarchy manager.
  uiManager.addUIBlock(new UIBlock() {
    @Override
    public void execute(NativeViewHierarchyManager viewHierarchyManager) {
      try {
        RNCameraView view = (RNCameraView) viewHierarchyManager.resolveView(viewTag);
        if (!view.isCameraOpened()) {
          promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
          return;
        }
        // The view settles the promise when recording finishes or fails.
        view.record(options, promise, cacheDirectory);
      } catch (Exception e) {
        // resolveView throws (or the cast fails) when viewTag is not an RNCameraView.
        promise.reject("E_CAMERA_BAD_VIEWTAG", "recordAsync: Expected a Camera component");
      }
    }
  });
}
@ReactMethod
public void stopRecording(final int viewTag) {
  // Fire-and-forget: no Promise, so failures are only logged.
  UIManagerModule uiManager =
      getReactApplicationContext().getNativeModule(UIManagerModule.class);
  uiManager.addUIBlock(new UIBlock() {
    @Override
    public void execute(NativeViewHierarchyManager viewHierarchyManager) {
      try {
        RNCameraView view = (RNCameraView) viewHierarchyManager.resolveView(viewTag);
        // Stopping is a no-op when the camera is not open.
        if (view.isCameraOpened()) {
          view.stopRecording();
        }
      } catch (Exception e) {
        // NOTE(review): swallows bad-viewTag errors; printStackTrace kept to
        // preserve existing behavior — consider proper logging upstream.
        e.printStackTrace();
      }
    }
  });
}
@ReactMethod
public void getSupportedRatios(final int viewTag, final Promise promise) {
  // Resolves the camera view for viewTag on the UI thread and returns the
  // aspect ratios the open camera supports, as an array of "W:H" strings.
  final ReactApplicationContext context = getReactApplicationContext();
  UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
  uiManager.addUIBlock(new UIBlock() {
    @Override
    public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
      final RNCameraView cameraView;
      try {
        cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
        WritableArray result = Arguments.createArray();
        if (cameraView.isCameraOpened()) {
          Set<AspectRatio> ratios = cameraView.getSupportedAspectRatios();
          for (AspectRatio ratio : ratios) {
            result.pushString(ratio.toString());
          }
          promise.resolve(result);
        } else {
          promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
        }
      } catch (Exception e) {
        // BUG FIX: previously only e.printStackTrace() ran here, leaving the
        // Promise unsettled forever on a bad viewTag. Reject instead, matching
        // the error contract of takePicture/record.
        promise.reject("E_CAMERA_BAD_VIEWTAG", "getSupportedRatios: Expected a Camera component");
      }
    }
  });
}
}

View File

@ -45,16 +45,6 @@ public class CameraViewManager extends ViewGroupManager<RNCameraView> {
private static final String REACT_CLASS = "RNCamera";
private static CameraViewManager instance;
private RNCameraView mCameraView;
public CameraViewManager() {
super();
instance = this;
}
public static CameraViewManager getInstance() { return instance; }
@Override
public String getName() {
return REACT_CLASS;
@ -62,8 +52,7 @@ public class CameraViewManager extends ViewGroupManager<RNCameraView> {
@Override
protected RNCameraView createViewInstance(ThemedReactContext themedReactContext) {
mCameraView = new RNCameraView(themedReactContext);
return mCameraView;
return new RNCameraView(themedReactContext);
}
@Override
@ -147,40 +136,4 @@ public class CameraViewManager extends ViewGroupManager<RNCameraView> {
public void setFaceDetectionClassifications(RNCameraView view, int classifications) {
view.setFaceDetectionClassifications(classifications);
}
public void takePicture(ReadableMap options, Promise promise) {
  // On emulators ("generic" fingerprint) the camera HAL is unavailable, so a
  // synthetic photo is generated instead of using the hardware pipeline.
  boolean isEmulator = Build.FINGERPRINT.contains("generic");
  if (isEmulator) {
    Bitmap fake = RNCameraViewHelper.generateSimulatorPhoto(mCameraView.getWidth(), mCameraView.getHeight());
    ByteBuffer pixels = ByteBuffer.allocate(fake.getRowBytes() * fake.getHeight());
    fake.copyPixelsToBuffer(pixels);
    new ResolveTakenPictureAsyncTask(pixels.array(), promise, options).execute();
    return;
  }
  if (mCameraView.isCameraOpened()) {
    // The view settles the promise once the picture callback fires.
    mCameraView.takePicture(options, promise);
  } else {
    promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
  }
}
public void record(final ReadableMap options, final Promise promise) {
  // Guard clause: recording requires an open camera.
  if (!mCameraView.isCameraOpened()) {
    promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
    return;
  }
  mCameraView.record(options, promise);
}
public void stopRecording() {
  // Nothing to stop when the camera is closed.
  if (!mCameraView.isCameraOpened()) {
    return;
  }
  mCameraView.stopRecording();
}
public Set<AspectRatio> getSupportedRatios() {
  // Returns null (not an empty set) when the camera is closed — callers check for it.
  return mCameraView.isCameraOpened() ? mCameraView.getSupportedAspectRatios() : null;
}
}

View File

@ -45,6 +45,7 @@ import java.util.concurrent.ConcurrentLinkedQueue;
public class RNCameraView extends CameraView implements LifecycleEventListener, BarCodeScannerAsyncTaskDelegate, FaceDetectorAsyncTaskDelegate {
private Queue<Promise> mPictureTakenPromises = new ConcurrentLinkedQueue<>();
private Map<Promise, ReadableMap> mPictureTakenOptions = new ConcurrentHashMap<>();
private Map<Promise, File> mPictureTakenDirectories = new ConcurrentHashMap<>();
private Promise mVideoRecordedPromise;
private List<String> mBarCodeTypes = null;
@ -83,7 +84,7 @@ public class RNCameraView extends CameraView implements LifecycleEventListener,
public void onPictureTaken(CameraView cameraView, final byte[] data) {
Promise promise = mPictureTakenPromises.poll();
ReadableMap options = mPictureTakenOptions.remove(promise);
File cacheDirectory = CameraModule.getScopedContext().getCacheDirectory();
final File cacheDirectory = mPictureTakenDirectories.remove(promise);
new ResolveTakenPictureAsyncTask(data, promise, options, cacheDirectory).execute();
}
@ -151,15 +152,15 @@ public class RNCameraView extends CameraView implements LifecycleEventListener,
initBarcodeReader();
}
public void takePicture(ReadableMap options, final Promise promise) {
// Queues a capture request: the promise, its options, and the output cache
// directory are registered BEFORE triggering the capture, because the
// onPictureTaken callback polls/removes them to resolve the result.
// Order matters here — do not reorder these statements.
public void takePicture(ReadableMap options, final Promise promise, File cacheDirectory) {
mPictureTakenPromises.add(promise);
mPictureTakenOptions.put(promise, options);
mPictureTakenDirectories.put(promise, cacheDirectory);
super.takePicture();
}
public void record(ReadableMap options, final Promise promise) {
public void record(ReadableMap options, final Promise promise, File cacheDirectory) {
try {
File cacheDirectory = CameraModule.getScopedContext().getCacheDirectory();
String path = RNFileUtils.getOutputFilePath(cacheDirectory, ".mp4");
int maxDuration = options.hasKey("maxDuration") ? options.getInt("maxDuration") : -1;
int maxFileSize = options.hasKey("maxFileSize") ? options.getInt("maxFileSize") : -1;

View File

@ -161,6 +161,7 @@ RCT_CUSTOM_VIEW_PROPERTY(barCodeTypes, NSArray, RNCamera)
RCT_REMAP_METHOD(takePicture,
options:(NSDictionary *)options
reactTag:(nonnull NSNumber *)reactTag
resolver:(RCTPromiseResolveBlock)resolve
rejecter:(RCTPromiseRejectBlock)reject)
{
@ -178,17 +179,10 @@ RCT_REMAP_METHOD(takePicture,
}
resolve(response);
#else
[self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, UIView *> *viewRegistry) {
RNCamera *view = nil;
for (NSNumber *reactTag in viewRegistry) {
UIView *reactView = viewRegistry[reactTag];
if ([reactView isKindOfClass:[RNCamera class]]) {
view = (RNCamera *)reactView;
break;
}
}
if (!view) {
RCTLogError(@"Could not find RNCamera view on viewRegistry");
[self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, RNCamera *> *viewRegistry) {
RNCamera *view = viewRegistry[reactTag];
if (![view isKindOfClass:[RNCamera class]]) {
RCTLogError(@"Invalid view returned from registry, expecting RNCamera, got: %@", view);
} else {
[view takePicture:options resolve:resolve reject:reject];
}
@ -198,6 +192,7 @@ RCT_REMAP_METHOD(takePicture,
RCT_REMAP_METHOD(record,
withOptions:(NSDictionary *)options
reactTag:(nonnull NSNumber *)reactTag
resolver:(RCTPromiseResolveBlock)resolve
rejecter:(RCTPromiseRejectBlock)reject)
{
@ -205,36 +200,22 @@ RCT_REMAP_METHOD(record,
reject(@"E_RECORDING_FAILED", @"Video recording is not supported on a simulator.", nil);
return;
#endif
[self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, UIView *> *viewRegistry) {
RNCamera *view = nil;
for (NSNumber *reactTag in viewRegistry) {
UIView *reactView = viewRegistry[reactTag];
if ([reactView isKindOfClass:[RNCamera class]]) {
view = (RNCamera *)reactView;
break;
}
}
if (!view) {
RCTLogError(@"Could not find RNCamera view on viewRegistry");
[self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, RNCamera *> *viewRegistry) {
RNCamera *view = viewRegistry[reactTag];
if (![view isKindOfClass:[RNCamera class]]) {
RCTLogError(@"Invalid view returned from registry, expecting RNCamera, got: %@", view);
} else {
[view record:options resolve:resolve reject:reject];
}
}];
}
RCT_EXPORT_METHOD(stopRecording)
RCT_REMAP_METHOD(stopRecording, reactTag:(nonnull NSNumber *)reactTag)
{
[self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, UIView *> *viewRegistry) {
RNCamera *view = nil;
for (NSNumber *reactTag in viewRegistry) {
UIView *reactView = viewRegistry[reactTag];
if ([reactView isKindOfClass:[RNCamera class]]) {
view = (RNCamera *)reactView;
break;
}
}
if (!view) {
RCTLogError(@"Could not find RNCamera view on viewRegistry");
[self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, RNCamera *> *viewRegistry) {
RNCamera *view = viewRegistry[reactTag];
if (![view isKindOfClass:[RNCamera class]]) {
RCTLogError(@"Invalid view returned from registry, expecting RNCamera, got: %@", view);
} else {
[view stopRecording];
}

View File

@ -2,7 +2,7 @@
import React from 'react';
import PropTypes from 'prop-types';
import { mapValues } from 'lodash';
import { Platform, NativeModules, ViewPropTypes, requireNativeComponent } from 'react-native';
import { findNodeHandle, Platform, NativeModules, ViewPropTypes, requireNativeComponent } from 'react-native';
import type { FaceFeature } from './FaceDetector';
@ -124,6 +124,8 @@ export default class Camera extends React.Component<PropsType> {
faceDetectionClassifications: CameraManager.FaceDetection.Classifications.none,
};
_cameraRef: ?Object;
_cameraHandle: ?number;
_lastEvents: { [string]: string };
_lastEventsTimes: { [string]: Date };
@ -140,12 +142,12 @@ export default class Camera extends React.Component<PropsType> {
if (!options.quality) {
options.quality = 1;
}
return await CameraManager.takePicture(options);
return await CameraManager.takePicture(options, this._cameraHandle);
}
async getSupportedRatiosAsync() {
if (Platform.OS === 'android') {
return await CameraManager.getSupportedRatios();
return await CameraManager.getSupportedRatios(this._cameraHandle);
} else {
throw new Error('Ratio is not supported on iOS');
}
@ -157,11 +159,11 @@ export default class Camera extends React.Component<PropsType> {
} else if (typeof options.quality === 'string') {
options.quality = Camera.Constants.VideoQuality[options.quality];
}
return await CameraManager.record(options);
return await CameraManager.record(options, this._cameraHandle);
}
stopRecording() {
CameraManager.stopRecording();
CameraManager.stopRecording(this._cameraHandle);
}
_onMountError = () => {
@ -195,14 +197,25 @@ export default class Camera extends React.Component<PropsType> {
}
};
_setReference = (ref: ?Object) => {
if (ref) {
this._cameraRef = ref;
this._cameraHandle = findNodeHandle(ref);
} else {
this._cameraRef = null;
this._cameraHandle = null;
}
};
render() {
const nativeProps = this._convertNativeProps(this.props);
return (
<RNCamera
{...nativeProps}
ref={this._setReference}
onMountError={this._onMountError}
onCameraRead={this._onCameraReady}
onCameraReady={this._onCameraReady}
onBarCodeRead={this._onObjectDetected(this.props.onBarCodeRead)}
onFacesDetected={this._onObjectDetected(this.props.onFacesDetected)}
/>