Merge pull request #1600 from react-native-community/feat/preview

Feat/preview
This commit is contained in:
João Guilherme Fidelis 2018-06-04 09:33:41 -03:00 committed by GitHub
commit e9cadb7e01
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 518 additions and 50 deletions

View File

@ -78,8 +78,12 @@ class Camera1 extends CameraViewImpl implements MediaRecorder.OnInfoListener,
private final SizeMap mPreviewSizes = new SizeMap(); private final SizeMap mPreviewSizes = new SizeMap();
private boolean mIsPreviewActive = false;
private final SizeMap mPictureSizes = new SizeMap(); private final SizeMap mPictureSizes = new SizeMap();
private Size mPictureSize;
private AspectRatio mAspectRatio; private AspectRatio mAspectRatio;
private boolean mShowingPreview; private boolean mShowingPreview;
@ -107,6 +111,7 @@ class Camera1 extends CameraViewImpl implements MediaRecorder.OnInfoListener,
public void onSurfaceChanged() { public void onSurfaceChanged() {
if (mCamera != null) { if (mCamera != null) {
setUpPreview(); setUpPreview();
mIsPreviewActive = false;
adjustCameraParameters(); adjustCameraParameters();
} }
} }
@ -164,6 +169,7 @@ class Camera1 extends CameraViewImpl implements MediaRecorder.OnInfoListener,
final boolean needsToStopPreview = mShowingPreview && Build.VERSION.SDK_INT < 14; final boolean needsToStopPreview = mShowingPreview && Build.VERSION.SDK_INT < 14;
if (needsToStopPreview) { if (needsToStopPreview) {
mCamera.stopPreview(); mCamera.stopPreview();
mIsPreviewActive = false;
} }
mCamera.setPreviewDisplay(mPreview.getSurfaceHolder()); mCamera.setPreviewDisplay(mPreview.getSurfaceHolder());
if (needsToStopPreview) { if (needsToStopPreview) {
@ -179,11 +185,23 @@ class Camera1 extends CameraViewImpl implements MediaRecorder.OnInfoListener,
private void startCameraPreview() { private void startCameraPreview() {
mCamera.startPreview(); mCamera.startPreview();
mIsPreviewActive = true;
if (mIsScanning) { if (mIsScanning) {
mCamera.setPreviewCallback(this); mCamera.setPreviewCallback(this);
} }
} }
// Restarts the live preview after pausePreview(). Delegates to
// startCameraPreview(), which also sets mIsPreviewActive = true and
// re-attaches the frame callback when barcode scanning is enabled.
@Override
public void resumePreview() {
startCameraPreview();
}
// Stops the live preview without releasing the camera, and marks the
// preview inactive so takePicture() fails fast until resumePreview().
// NOTE(review): assumes mCamera is non-null — callers appear to guard with
// isCameraOpened() before invoking this; confirm at call sites.
@Override
public void pausePreview() {
mCamera.stopPreview();
mIsPreviewActive = false;
}
@Override @Override
boolean isCameraOpened() { boolean isCameraOpened() {
return mCamera != null; return mCamera != null;
@ -217,6 +235,25 @@ class Camera1 extends CameraViewImpl implements MediaRecorder.OnInfoListener,
return idealAspectRatios.ratios(); return idealAspectRatios.ratios();
} }
// Returns the supported still-capture sizes for the given aspect ratio,
// taken from the mPictureSizes map collected from the camera parameters.
@Override
SortedSet<Size> getAvailablePictureSizes(AspectRatio ratio) {
return mPictureSizes.sizes(ratio);
}
// Sets the desired still-capture size. If the camera is already open the
// new size is pushed to the camera parameters immediately; otherwise it is
// only stored here and applied later (mPictureSize is read when parameters
// are adjusted).
@Override
void setPictureSize(Size size) {
mPictureSize = size;
// Only touch the hardware when both the parameter object and the camera exist.
if (mCameraParameters != null && mCamera != null) {
mCameraParameters.setPictureSize(size.getWidth(), size.getHeight());
mCamera.setParameters(mCameraParameters);
}
}
// Returns the currently configured picture size. May be null before the
// camera is opened and before any explicit size has been set.
@Override
Size getPictureSize() {
return mPictureSize;
}
@Override @Override
boolean setAspectRatio(AspectRatio ratio) { boolean setAspectRatio(AspectRatio ratio) {
if (mAspectRatio == null || !isCameraOpened()) { if (mAspectRatio == null || !isCameraOpened()) {
@ -334,6 +371,9 @@ class Camera1 extends CameraViewImpl implements MediaRecorder.OnInfoListener,
throw new IllegalStateException( throw new IllegalStateException(
"Camera is not ready. Call start() before takePicture()."); "Camera is not ready. Call start() before takePicture().");
} }
if (!mIsPreviewActive) {
throw new IllegalStateException("Preview is paused - resume it before taking a picture.");
}
if (getAutoFocus()) { if (getAutoFocus()) {
mCamera.cancelAutoFocus(); mCamera.cancelAutoFocus();
mCamera.autoFocus(new Camera.AutoFocusCallback() { mCamera.autoFocus(new Camera.AutoFocusCallback() {
@ -355,6 +395,7 @@ class Camera1 extends CameraViewImpl implements MediaRecorder.OnInfoListener,
isPictureCaptureInProgress.set(false); isPictureCaptureInProgress.set(false);
camera.cancelAutoFocus(); camera.cancelAutoFocus();
camera.startPreview(); camera.startPreview();
mIsPreviewActive = true;
if (mIsScanning) { if (mIsScanning) {
camera.setPreviewCallback(Camera1.this); camera.setPreviewCallback(Camera1.this);
} }
@ -403,6 +444,7 @@ class Camera1 extends CameraViewImpl implements MediaRecorder.OnInfoListener,
final boolean needsToStopPreview = mShowingPreview && Build.VERSION.SDK_INT < 14; final boolean needsToStopPreview = mShowingPreview && Build.VERSION.SDK_INT < 14;
if (needsToStopPreview) { if (needsToStopPreview) {
mCamera.stopPreview(); mCamera.stopPreview();
mIsPreviewActive = false;
} }
mCamera.setDisplayOrientation(calcDisplayOrientation(displayOrientation)); mCamera.setDisplayOrientation(calcDisplayOrientation(displayOrientation));
if (needsToStopPreview) { if (needsToStopPreview) {
@ -420,6 +462,7 @@ class Camera1 extends CameraViewImpl implements MediaRecorder.OnInfoListener,
} }
mCamera.stopPreview(); mCamera.stopPreview();
mIsPreviewActive = false;
if (surfaceTexture == null) { if (surfaceTexture == null) {
mCamera.setPreviewTexture((SurfaceTexture) mPreview.getSurfaceTexture()); mCamera.setPreviewTexture((SurfaceTexture) mPreview.getSurfaceTexture());
@ -504,13 +547,15 @@ class Camera1 extends CameraViewImpl implements MediaRecorder.OnInfoListener,
Size size = chooseOptimalSize(sizes); Size size = chooseOptimalSize(sizes);
// Always re-apply camera parameters // Always re-apply camera parameters
// Largest picture size in this ratio if (mPictureSize == null) {
final Size pictureSize = mPictureSizes.sizes(mAspectRatio).last(); mPictureSize = mPictureSizes.sizes(mAspectRatio).last();
}
if (mShowingPreview) { if (mShowingPreview) {
mCamera.stopPreview(); mCamera.stopPreview();
mIsPreviewActive = false;
} }
mCameraParameters.setPreviewSize(size.getWidth(), size.getHeight()); mCameraParameters.setPreviewSize(size.getWidth(), size.getHeight());
mCameraParameters.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight()); mCameraParameters.setPictureSize(mPictureSize.getWidth(), mPictureSize.getHeight());
mCameraParameters.setRotation(calcCameraRotation(mDisplayOrientation)); mCameraParameters.setRotation(calcCameraRotation(mDisplayOrientation));
setAutoFocusInternal(mAutoFocus); setAutoFocusInternal(mAutoFocus);
setFlashInternal(mFlash); setFlashInternal(mFlash);
@ -555,6 +600,7 @@ class Camera1 extends CameraViewImpl implements MediaRecorder.OnInfoListener,
if (mCamera != null) { if (mCamera != null) {
mCamera.release(); mCamera.release();
mCamera = null; mCamera = null;
mPictureSize = null;
mCallback.onCameraClosed(); mCallback.onCameraClosed();
} }
} }
@ -646,7 +692,6 @@ class Camera1 extends CameraViewImpl implements MediaRecorder.OnInfoListener,
String currentMode = FLASH_MODES.get(mFlash); String currentMode = FLASH_MODES.get(mFlash);
if (modes == null || !modes.contains(currentMode)) { if (modes == null || !modes.contains(currentMode)) {
mCameraParameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF); mCameraParameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
mFlash = Constants.FLASH_OFF;
return true; return true;
} }
return false; return false;
@ -687,7 +732,6 @@ class Camera1 extends CameraViewImpl implements MediaRecorder.OnInfoListener,
String currentMode = WB_MODES.get(mWhiteBalance); String currentMode = WB_MODES.get(mWhiteBalance);
if (modes == null || !modes.contains(currentMode)) { if (modes == null || !modes.contains(currentMode)) {
mCameraParameters.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO); mCameraParameters.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
mWhiteBalance = Constants.WB_AUTO;
return true; return true;
} }
return false; return false;

View File

@ -216,6 +216,8 @@ class Camera2 extends CameraViewImpl implements MediaRecorder.OnInfoListener, Me
private final SizeMap mPictureSizes = new SizeMap(); private final SizeMap mPictureSizes = new SizeMap();
private Size mPictureSize;
private int mFacing; private int mFacing;
private AspectRatio mAspectRatio = Constants.DEFAULT_ASPECT_RATIO; private AspectRatio mAspectRatio = Constants.DEFAULT_ASPECT_RATIO;
@ -345,6 +347,35 @@ class Camera2 extends CameraViewImpl implements MediaRecorder.OnInfoListener, Me
return mPreviewSizes.ratios(); return mPreviewSizes.ratios();
} }
// Returns the supported still-capture sizes for the given aspect ratio,
// taken from the mPictureSizes map collected from the stream configuration map.
@Override
SortedSet<Size> getAvailablePictureSizes(AspectRatio ratio) {
return mPictureSizes.sizes(ratio);
}
// Sets the still-capture size for the Camera2 implementation. Because the
// ImageReader output size is fixed at creation, this tears down the current
// capture session and still-image reader, then recreates both so the new
// size takes effect.
@Override
void setPictureSize(Size size) {
// Stop in-flight repeating requests and close the session before
// reconfiguring its outputs.
if (mCaptureSession != null) {
try {
mCaptureSession.stopRepeating();
} catch (CameraAccessException e) {
e.printStackTrace();
}
mCaptureSession.close();
mCaptureSession = null;
}
// Release the old reader; prepareStillImageReader() recreates it at the new size.
if (mStillImageReader != null) {
mStillImageReader.close();
}
mPictureSize = size;
prepareStillImageReader();
startCaptureSession();
}
// Returns the currently configured picture size (defaulted elsewhere to the
// largest size of the current aspect ratio when none was set explicitly).
@Override
Size getPictureSize() {
return mPictureSize;
}
@Override @Override
boolean setAspectRatio(AspectRatio ratio) { boolean setAspectRatio(AspectRatio ratio) {
if (ratio != null && mPreviewSizes.isEmpty()) { if (ratio != null && mPreviewSizes.isEmpty()) {
@ -653,6 +684,9 @@ class Camera2 extends CameraViewImpl implements MediaRecorder.OnInfoListener, Me
} }
mPictureSizes.clear(); mPictureSizes.clear();
collectPictureSizes(mPictureSizes, map); collectPictureSizes(mPictureSizes, map);
if (mPictureSize == null) {
mPictureSize = mPictureSizes.sizes(mAspectRatio).last();
}
for (AspectRatio ratio : mPreviewSizes.ratios()) { for (AspectRatio ratio : mPreviewSizes.ratios()) {
if (!mPictureSizes.ratios().contains(ratio)) { if (!mPictureSizes.ratios().contains(ratio)) {
mPreviewSizes.remove(ratio); mPreviewSizes.remove(ratio);
@ -674,8 +708,7 @@ class Camera2 extends CameraViewImpl implements MediaRecorder.OnInfoListener, Me
if (mStillImageReader != null) { if (mStillImageReader != null) {
mStillImageReader.close(); mStillImageReader.close();
} }
Size largest = mPictureSizes.sizes(mAspectRatio).last(); mStillImageReader = ImageReader.newInstance(mPictureSize.getWidth(), mPictureSize.getHeight(),
mStillImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
ImageFormat.JPEG, 1); ImageFormat.JPEG, 1);
mStillImageReader.setOnImageAvailableListener(mOnImageAvailableListener, null); mStillImageReader.setOnImageAvailableListener(mOnImageAvailableListener, null);
} }
@ -728,6 +761,20 @@ class Camera2 extends CameraViewImpl implements MediaRecorder.OnInfoListener, Me
} }
} }
// Resumes the preview by (re)starting the capture session, which restores
// the repeating preview request.
@Override
public void resumePreview() {
startCaptureSession();
}
/**
 * Pauses the live preview by cancelling the capture session's repeating request.
 *
 * Safe to call when no capture session is active yet (e.g. before the camera
 * has finished starting): in that case this is a no-op. The original code
 * dereferenced mCaptureSession unconditionally, which would throw a
 * NullPointerException in that window, while every other use of
 * mCaptureSession in this class is null-guarded.
 */
@Override
public void pausePreview() {
if (mCaptureSession == null) {
return;
}
try {
mCaptureSession.stopRepeating();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
public Surface getPreviewSurface() { public Surface getPreviewSurface() {
if (mPreviewSurface != null) { if (mPreviewSurface != null) {
return mPreviewSurface; return mPreviewSurface;

View File

@ -37,6 +37,7 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy; import java.lang.annotation.RetentionPolicy;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Set; import java.util.Set;
import java.util.SortedSet;
public class CameraView extends FrameLayout { public class CameraView extends FrameLayout {
@ -219,6 +220,7 @@ public class CameraView extends FrameLayout {
state.zoom = getZoom(); state.zoom = getZoom();
state.whiteBalance = getWhiteBalance(); state.whiteBalance = getWhiteBalance();
state.scanning = getScanning(); state.scanning = getScanning();
state.pictureSize = getPictureSize();
return state; return state;
} }
@ -238,6 +240,7 @@ public class CameraView extends FrameLayout {
setZoom(ss.zoom); setZoom(ss.zoom);
setWhiteBalance(ss.whiteBalance); setWhiteBalance(ss.whiteBalance);
setScanning(ss.scanning); setScanning(ss.scanning);
setPictureSize(ss.pictureSize);
} }
public void setUsingCamera2Api(boolean useCamera2) { public void setUsingCamera2Api(boolean useCamera2) {
@ -403,6 +406,31 @@ public class CameraView extends FrameLayout {
return mImpl.getAspectRatio(); return mImpl.getAspectRatio();
} }
/**
 * Gets all the picture sizes for particular ratio supported by the current camera.
 *
 * @param ratio {@link AspectRatio} for which the available image sizes will be returned.
 * @return A sorted set of supported {@link Size} values for the given ratio.
 */
public SortedSet<Size> getAvailablePictureSizes(@NonNull AspectRatio ratio) {
return mImpl.getAvailablePictureSizes(ratio);
}
/**
 * Sets the size of taken pictures.
 *
 * @param size The {@link Size} to be set. Presumably this should be one of the
 *             sizes returned by {@link #getAvailablePictureSizes} — confirm
 *             against the implementation's handling of unsupported sizes.
 */
public void setPictureSize(@NonNull Size size) {
mImpl.setPictureSize(size);
}
/**
 * Gets the size of pictures that will be taken.
 *
 * @return The configured picture {@link Size}; may be null before the camera opens.
 */
public Size getPictureSize() {
return mImpl.getPictureSize();
}
/** /**
* Enables or disables the continuous auto-focus mode. When the current camera doesn't support * Enables or disables the continuous auto-focus mode. When the current camera doesn't support
* auto-focus, calling this method will be ignored. * auto-focus, calling this method will be ignored.
@ -495,6 +523,14 @@ public class CameraView extends FrameLayout {
mImpl.stopRecording(); mImpl.stopRecording();
} }
/**
 * Resumes a preview previously stopped with {@link #pausePreview()}.
 */
public void resumePreview() {
mImpl.resumePreview();
}
/**
 * Pauses the preview without closing the camera; resume with {@link #resumePreview()}.
 */
public void pausePreview() {
mImpl.pausePreview();
}
public void setPreviewTexture(SurfaceTexture surfaceTexture) { public void setPreviewTexture(SurfaceTexture surfaceTexture) {
mImpl.setPreviewTexture(surfaceTexture); mImpl.setPreviewTexture(surfaceTexture);
} }
@ -591,6 +627,8 @@ public class CameraView extends FrameLayout {
boolean scanning; boolean scanning;
Size pictureSize;
@SuppressWarnings("WrongConstant") @SuppressWarnings("WrongConstant")
public SavedState(Parcel source, ClassLoader loader) { public SavedState(Parcel source, ClassLoader loader) {
super(source); super(source);
@ -602,6 +640,7 @@ public class CameraView extends FrameLayout {
zoom = source.readFloat(); zoom = source.readFloat();
whiteBalance = source.readInt(); whiteBalance = source.readInt();
scanning = source.readByte() != 0; scanning = source.readByte() != 0;
pictureSize = source.readParcelable(loader);
} }
public SavedState(Parcelable superState) { public SavedState(Parcelable superState) {
@ -619,6 +658,7 @@ public class CameraView extends FrameLayout {
out.writeFloat(zoom); out.writeFloat(zoom);
out.writeInt(whiteBalance); out.writeInt(whiteBalance);
out.writeByte((byte) (scanning ? 1 : 0)); out.writeByte((byte) (scanning ? 1 : 0));
out.writeParcelable(pictureSize, flags);
} }
public static final Creator<SavedState> CREATOR public static final Creator<SavedState> CREATOR

View File

@ -21,6 +21,7 @@ import android.view.View;
import android.graphics.SurfaceTexture; import android.graphics.SurfaceTexture;
import java.util.Set; import java.util.Set;
import java.util.SortedSet;
abstract class CameraViewImpl { abstract class CameraViewImpl {
@ -52,6 +53,12 @@ abstract class CameraViewImpl {
abstract Set<AspectRatio> getSupportedAspectRatios(); abstract Set<AspectRatio> getSupportedAspectRatios();
abstract SortedSet<Size> getAvailablePictureSizes(AspectRatio ratio);
abstract void setPictureSize(Size size);
abstract Size getPictureSize();
/** /**
* @return {@code true} if the aspect ratio was changed. * @return {@code true} if the aspect ratio was changed.
*/ */
@ -92,6 +99,10 @@ abstract class CameraViewImpl {
abstract boolean getScanning(); abstract boolean getScanning();
abstract public void resumePreview();
abstract public void pausePreview();
abstract public void setPreviewTexture(SurfaceTexture surfaceTexture); abstract public void setPreviewTexture(SurfaceTexture surfaceTexture);
abstract public Size getPreviewSize(); abstract public Size getPreviewSize();

View File

@ -16,12 +16,14 @@
package com.google.android.cameraview; package com.google.android.cameraview;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.annotation.NonNull; import android.support.annotation.NonNull;
/** /**
* Immutable class for describing width and height dimensions in pixels. * Immutable class for describing width and height dimensions in pixels.
*/ */
public class Size implements Comparable<Size> { public class Size implements Comparable<Size>, Parcelable {
private final int mWidth; private final int mWidth;
private final int mHeight; private final int mHeight;
@ -37,6 +39,20 @@ public class Size implements Comparable<Size> {
mHeight = height; mHeight = height;
} }
/**
 * Parses a string of the form "{width}x{height}" (e.g. "1920x1080") into a Size.
 *
 * @param s the string to parse; must contain an 'x' separating two integers.
 * @throws IllegalArgumentException if the separator is missing or either
 *         component is not a valid integer (the NumberFormatException is
 *         wrapped and attached as the cause).
 */
public static Size parse(String s) {
int position = s.indexOf('x');
if (position == -1) {
throw new IllegalArgumentException("Malformed size: " + s);
}
try {
int width = Integer.parseInt(s.substring(0, position));
int height = Integer.parseInt(s.substring(position + 1));
return new Size(width, height);
} catch (NumberFormatException e) {
throw new IllegalArgumentException("Malformed size: " + s, e);
}
}
public int getWidth() { public int getWidth() {
return mWidth; return mWidth;
} }
@ -76,4 +92,28 @@ public class Size implements Comparable<Size> {
return mWidth * mHeight - another.mWidth * another.mHeight; return mWidth * mHeight - another.mWidth * another.mHeight;
} }
// Parcelable implementation: a Size carries no file descriptors or special
// contents, so describeContents() returns 0.
@Override
public int describeContents() {
return 0;
}
// Serializes as two ints, width first; must stay in sync with CREATOR below.
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeInt(mWidth);
dest.writeInt(mHeight);
}
// Deserializes in the same order writeToParcel() writes: width, then height.
public static final Parcelable.Creator<Size> CREATOR = new Parcelable.Creator<Size>() {
@Override
public Size createFromParcel(Parcel source) {
int width = source.readInt();
int height = source.readInt();
return new Size(width, height);
}
@Override
public Size[] newArray(int size) {
return new Size[size];
}
};
} }

View File

@ -12,6 +12,7 @@ import org.reactnative.barcodedetector.BarcodeFormatUtils;
import org.reactnative.camera.tasks.ResolveTakenPictureAsyncTask; import org.reactnative.camera.tasks.ResolveTakenPictureAsyncTask;
import org.reactnative.camera.utils.ScopedContext; import org.reactnative.camera.utils.ScopedContext;
import org.reactnative.facedetector.RNFaceDetector; import org.reactnative.facedetector.RNFaceDetector;
import com.google.android.cameraview.Size;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
@ -20,6 +21,7 @@ import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.SortedSet;
public class CameraModule extends ReactContextBaseJavaModule { public class CameraModule extends ReactContextBaseJavaModule {
private static final String TAG = "CameraModule"; private static final String TAG = "CameraModule";
@ -180,6 +182,48 @@ public class CameraModule extends ReactContextBaseJavaModule {
}); });
} }
// RN bridge method: pauses the preview of the camera view identified by
// viewTag. Runs on the UI thread via a UIBlock because view resolution must
// happen there. Silently no-ops (after printing the stack trace) if the view
// cannot be resolved, and skips the call entirely when the camera is closed.
@ReactMethod
public void pausePreview(final int viewTag) {
final ReactApplicationContext context = getReactApplicationContext();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
final RNCameraView cameraView;
try {
cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
if (cameraView.isCameraOpened()) {
cameraView.pausePreview();
}
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
// RN bridge method: resumes the preview of the camera view identified by
// viewTag. Mirrors pausePreview(): resolved on the UI thread, guarded by
// isCameraOpened(), and resolution failures are only logged.
@ReactMethod
public void resumePreview(final int viewTag) {
final ReactApplicationContext context = getReactApplicationContext();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
final RNCameraView cameraView;
try {
cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
if (cameraView.isCameraOpened()) {
cameraView.resumePreview();
}
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
@ReactMethod @ReactMethod
public void takePicture(final ReadableMap options, final int viewTag, final Promise promise) { public void takePicture(final ReadableMap options, final int viewTag, final Promise promise) {
final ReactApplicationContext context = getReactApplicationContext(); final ReactApplicationContext context = getReactApplicationContext();
@ -221,7 +265,7 @@ public class CameraModule extends ReactContextBaseJavaModule {
promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running"); promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
} }
} catch (Exception e) { } catch (Exception e) {
promise.reject("E_CAMERA_BAD_VIEWTAG", "recordAsync: Expected a Camera component"); promise.reject("E_CAPTURE_FAILED", e.getMessage());
} }
} }
}); });
@ -274,4 +318,31 @@ public class CameraModule extends ReactContextBaseJavaModule {
} }
}); });
} }
// RN bridge method: resolves with an array of "WxH" strings describing the
// picture sizes the camera supports for the given aspect-ratio string.
// Rejects with E_CAMERA_UNAVAILABLE when the camera is not running, and with
// E_CAMERA_BAD_VIEWTAG when the view cannot be resolved (note: any exception
// in the block, e.g. a malformed ratio string, is mapped to that same code).
@ReactMethod
public void getAvailablePictureSizes(final String ratio, final int viewTag, final Promise promise) {
final ReactApplicationContext context = getReactApplicationContext();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
final RNCameraView cameraView;
try {
cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
WritableArray result = Arguments.createArray();
if (cameraView.isCameraOpened()) {
SortedSet<Size> sizes = cameraView.getAvailablePictureSizes(AspectRatio.parse(ratio));
for (Size size : sizes) {
result.pushString(size.toString());
}
promise.resolve(result);
} else {
promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
}
} catch (Exception e) {
promise.reject("E_CAMERA_BAD_VIEWTAG", "getAvailablePictureSizesAsync: Expected a Camera component");
}
}
});
}
} }

View File

@ -7,6 +7,7 @@ import com.facebook.react.uimanager.ThemedReactContext;
import com.facebook.react.uimanager.ViewGroupManager; import com.facebook.react.uimanager.ViewGroupManager;
import com.facebook.react.uimanager.annotations.ReactProp; import com.facebook.react.uimanager.annotations.ReactProp;
import com.google.android.cameraview.AspectRatio; import com.google.android.cameraview.AspectRatio;
import com.google.android.cameraview.Size;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
@ -21,7 +22,8 @@ public class CameraViewManager extends ViewGroupManager<RNCameraView> {
EVENT_ON_BARCODES_DETECTED("onGoogleVisionBarcodesDetected"), EVENT_ON_BARCODES_DETECTED("onGoogleVisionBarcodesDetected"),
EVENT_ON_FACE_DETECTION_ERROR("onFaceDetectionError"), EVENT_ON_FACE_DETECTION_ERROR("onFaceDetectionError"),
EVENT_ON_BARCODE_DETECTION_ERROR("onGoogleVisionBarcodeDetectionError"), EVENT_ON_BARCODE_DETECTION_ERROR("onGoogleVisionBarcodeDetectionError"),
EVENT_ON_TEXT_RECOGNIZED("onTextRecognized"); EVENT_ON_TEXT_RECOGNIZED("onTextRecognized"),
EVENT_ON_PICTURE_SAVED("onPictureSaved");
private final String mName; private final String mName;
@ -99,6 +101,11 @@ public class CameraViewManager extends ViewGroupManager<RNCameraView> {
view.setWhiteBalance(whiteBalance); view.setWhiteBalance(whiteBalance);
} }
// Maps the JS "pictureSize" prop (a "WxH" string, e.g. "1920x1080") onto the
// native view. Size.parse throws IllegalArgumentException on malformed input.
@ReactProp(name = "pictureSize")
public void setPictureSize(RNCameraView view, String size) {
view.setPictureSize(Size.parse(size));
}
@ReactProp(name = "barCodeTypes") @ReactProp(name = "barCodeTypes")
public void setBarCodeTypes(RNCameraView view, ReadableArray barCodeTypes) { public void setBarCodeTypes(RNCameraView view, ReadableArray barCodeTypes) {
if (barCodeTypes == null) { if (barCodeTypes == null) {

View File

@ -34,7 +34,7 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ConcurrentLinkedQueue;
public class RNCameraView extends CameraView implements LifecycleEventListener, BarCodeScannerAsyncTaskDelegate, FaceDetectorAsyncTaskDelegate, public class RNCameraView extends CameraView implements LifecycleEventListener, BarCodeScannerAsyncTaskDelegate, FaceDetectorAsyncTaskDelegate,
BarcodeDetectorAsyncTaskDelegate, TextRecognizerAsyncTaskDelegate { BarcodeDetectorAsyncTaskDelegate, TextRecognizerAsyncTaskDelegate, PictureSavedDelegate {
private ThemedReactContext mThemedReactContext; private ThemedReactContext mThemedReactContext;
private Queue<Promise> mPictureTakenPromises = new ConcurrentLinkedQueue<>(); private Queue<Promise> mPictureTakenPromises = new ConcurrentLinkedQueue<>();
private Map<Promise, ReadableMap> mPictureTakenOptions = new ConcurrentHashMap<>(); private Map<Promise, ReadableMap> mPictureTakenOptions = new ConcurrentHashMap<>();
@ -86,8 +86,11 @@ public class RNCameraView extends CameraView implements LifecycleEventListener,
public void onPictureTaken(CameraView cameraView, final byte[] data) { public void onPictureTaken(CameraView cameraView, final byte[] data) {
Promise promise = mPictureTakenPromises.poll(); Promise promise = mPictureTakenPromises.poll();
ReadableMap options = mPictureTakenOptions.remove(promise); ReadableMap options = mPictureTakenOptions.remove(promise);
if (options.hasKey("fastMode") && options.getBoolean("fastMode")) {
promise.resolve(null);
}
final File cacheDirectory = mPictureTakenDirectories.remove(promise); final File cacheDirectory = mPictureTakenDirectories.remove(promise);
new ResolveTakenPictureAsyncTask(data, promise, options, cacheDirectory).execute(); new ResolveTakenPictureAsyncTask(data, promise, options, cacheDirectory, RNCameraView.this).execute();
} }
@Override @Override
@ -223,7 +226,19 @@ public class RNCameraView extends CameraView implements LifecycleEventListener,
MediaActionSound sound = new MediaActionSound(); MediaActionSound sound = new MediaActionSound();
sound.play(MediaActionSound.SHUTTER_CLICK); sound.play(MediaActionSound.SHUTTER_CLICK);
} }
try {
super.takePicture(); super.takePicture();
} catch (Exception e) {
mPictureTakenPromises.remove(promise);
mPictureTakenOptions.remove(promise);
mPictureTakenDirectories.remove(promise);
throw e;
}
}
@Override
public void onPictureSaved(WritableMap response) {
RNCameraViewHelper.emitPictureSavedEvent(this, response);
} }
public void record(ReadableMap options, final Promise promise, File cacheDirectory) { public void record(ReadableMap options, final Promise promise, File cacheDirectory) {

View File

@ -175,6 +175,14 @@ public class RNCameraViewHelper {
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event); reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
} }
// Picture saved event
// Dispatches a PictureSavedEvent for the given view through the React Native
// event dispatcher, so JS "onPictureSaved" listeners receive the response map.
public static void emitPictureSavedEvent(ViewGroup view, WritableMap response) {
PictureSavedEvent event = PictureSavedEvent.obtain(view.getId(), response);
ReactContext reactContext = (ReactContext) view.getContext();
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
// Face detection events // Face detection events
public static void emitFacesDetectedEvent( public static void emitFacesDetectedEvent(

View File

@ -0,0 +1,46 @@
package org.reactnative.camera.events;
import android.support.v4.util.Pools;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import org.reactnative.camera.CameraViewManager;
/**
 * React Native event emitted when a picture taken in fast mode has finished
 * being written to disk. Instances are recycled through a small synchronized
 * pool, matching the standard RN Event pattern.
 */
public class PictureSavedEvent extends Event<PictureSavedEvent> {
    private static final Pools.SynchronizedPool<PictureSavedEvent> EVENTS_POOL = new Pools.SynchronizedPool<>(5);

    // Payload forwarded to JS; expected shape: { id: int, data: { uri: ... } }.
    private WritableMap mResponse;

    private PictureSavedEvent() {}

    /**
     * Returns a pooled instance (or a new one if the pool is empty),
     * initialized for the given view tag and response payload.
     */
    public static PictureSavedEvent obtain(int viewTag, WritableMap response) {
        PictureSavedEvent event = EVENTS_POOL.acquire();
        if (event == null) {
            event = new PictureSavedEvent();
        }
        event.init(viewTag, response);
        return event;
    }

    private void init(int viewTag, WritableMap response) {
        super.init(viewTag);
        mResponse = response;
    }

    /**
     * Coalesces by the saved file's URI so events for different pictures are
     * never merged. Math.floorMod is used instead of '%' because
     * String#hashCode() may be negative, and '%' would then produce a
     * negative coalescing key.
     */
    @Override
    public short getCoalescingKey() {
        int hashCode = Math.floorMod(mResponse.getMap("data").getString("uri").hashCode(), Short.MAX_VALUE);
        return (short) hashCode;
    }

    @Override
    public String getEventName() {
        return CameraViewManager.Events.EVENT_ON_PICTURE_SAVED.toString();
    }

    @Override
    public void dispatch(RCTEventEmitter rctEventEmitter) {
        rctEventEmitter.receiveEvent(getViewTag(), getEventName(), mResponse);
    }
}

View File

@ -0,0 +1,7 @@
package org.reactnative.camera.tasks;
import com.facebook.react.bridge.WritableMap;
/**
 * Implemented by components (e.g. the camera view) that want to be notified
 * when a fast-mode picture has finished being written to disk, so they can
 * emit the corresponding event with the response payload.
 */
public interface PictureSavedDelegate {
void onPictureSaved(WritableMap response);
}

View File

@ -30,18 +30,14 @@ public class ResolveTakenPictureAsyncTask extends AsyncTask<Void, Void, Writable
private ReadableMap mOptions; private ReadableMap mOptions;
private File mCacheDirectory; private File mCacheDirectory;
private Bitmap mBitmap; private Bitmap mBitmap;
private PictureSavedDelegate mPictureSavedDelegate;
public ResolveTakenPictureAsyncTask(byte[] imageData, Promise promise, ReadableMap options) { public ResolveTakenPictureAsyncTask(byte[] imageData, Promise promise, ReadableMap options, File cacheDirectory, PictureSavedDelegate delegate) {
mPromise = promise;
mOptions = options;
mImageData = imageData;
}
public ResolveTakenPictureAsyncTask(byte[] imageData, Promise promise, ReadableMap options, File cacheDirectory) {
mPromise = promise; mPromise = promise;
mOptions = options; mOptions = options;
mImageData = imageData; mImageData = imageData;
mCacheDirectory = cacheDirectory; mCacheDirectory = cacheDirectory;
mPictureSavedDelegate = delegate;
} }
private int getQuality() { private int getQuality() {
@ -231,8 +227,15 @@ public class ResolveTakenPictureAsyncTask extends AsyncTask<Void, Void, Writable
// If the response is not null everything went well and we can resolve the promise. // If the response is not null everything went well and we can resolve the promise.
if (response != null) { if (response != null) {
if (mOptions.hasKey("fastMode") && mOptions.getBoolean("fastMode")) {
WritableMap wrapper = Arguments.createMap();
wrapper.putInt("id", mOptions.getInt("id"));
wrapper.putMap("data", response);
mPictureSavedDelegate.onPictureSaved(wrapper);
} else {
mPromise.resolve(response); mPromise.resolve(response);
} }
} }
}
} }

View File

@ -29,7 +29,9 @@
@property (assign, nonatomic) NSInteger autoFocus; @property (assign, nonatomic) NSInteger autoFocus;
@property (assign, nonatomic) float focusDepth; @property (assign, nonatomic) float focusDepth;
@property (assign, nonatomic) NSInteger whiteBalance; @property (assign, nonatomic) NSInteger whiteBalance;
@property (nonatomic, assign, getter=isReadingBarCodes) BOOL barCodeReading; @property (assign, nonatomic) AVCaptureSessionPreset pictureSize;
@property (nonatomic, assign) BOOL isReadingBarCodes;
@property (nonatomic, assign) BOOL isDetectingFaces;
@property(assign, nonatomic) AVVideoCodecType videoCodecType; @property(assign, nonatomic) AVVideoCodecType videoCodecType;
- (id)initWithBridge:(RCTBridge *)bridge; - (id)initWithBridge:(RCTBridge *)bridge;
@ -39,6 +41,7 @@
- (void)updateFocusDepth; - (void)updateFocusDepth;
- (void)updateZoom; - (void)updateZoom;
- (void)updateWhiteBalance; - (void)updateWhiteBalance;
- (void)updatePictureSize;
- (void)updateFaceDetecting:(id)isDetectingFaces; - (void)updateFaceDetecting:(id)isDetectingFaces;
- (void)updateFaceDetectionMode:(id)requestedMode; - (void)updateFaceDetectionMode:(id)requestedMode;
- (void)updateFaceDetectionLandmarks:(id)requestedLandmarks; - (void)updateFaceDetectionLandmarks:(id)requestedLandmarks;
@ -46,11 +49,14 @@
- (void)takePicture:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject; - (void)takePicture:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject;
- (void)record:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject; - (void)record:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject;
- (void)stopRecording; - (void)stopRecording;
- (void)resumePreview;
- (void)pausePreview;
- (void)setupOrDisableBarcodeScanner; - (void)setupOrDisableBarcodeScanner;
- (void)onReady:(NSDictionary *)event; - (void)onReady:(NSDictionary *)event;
- (void)onMountingError:(NSDictionary *)event; - (void)onMountingError:(NSDictionary *)event;
- (void)onCodeRead:(NSDictionary *)event; - (void)onCodeRead:(NSDictionary *)event;
- (void)onFacesDetected:(NSDictionary *)event; - (void)onFacesDetected:(NSDictionary *)event;
- (void)onPictureSaved:(NSDictionary *)event;
@end @end

View File

@ -21,6 +21,7 @@
@property (nonatomic, copy) RCTDirectEventBlock onMountError; @property (nonatomic, copy) RCTDirectEventBlock onMountError;
@property (nonatomic, copy) RCTDirectEventBlock onBarCodeRead; @property (nonatomic, copy) RCTDirectEventBlock onBarCodeRead;
@property (nonatomic, copy) RCTDirectEventBlock onFacesDetected; @property (nonatomic, copy) RCTDirectEventBlock onFacesDetected;
@property (nonatomic, copy) RCTDirectEventBlock onPictureSaved;
@end @end
@ -85,6 +86,13 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
} }
} }
- (void)onPictureSaved:(NSDictionary *)event
{
if (_onPictureSaved) {
_onPictureSaved(event);
}
}
- (void)layoutSubviews - (void)layoutSubviews
{ {
[super layoutSubviews]; [super layoutSubviews];
@ -210,7 +218,7 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
AVCaptureDevice *device = [self.videoCaptureDeviceInput device]; AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
NSError *error = nil; NSError *error = nil;
if (self.autoFocus < 0 || device.focusMode != RNCameraAutoFocusOff || device.position == RNCameraTypeFront) { if (device == nil || self.autoFocus < 0 || device.focusMode != RNCameraAutoFocusOff || device.position == RNCameraTypeFront) {
return; return;
} }
@ -284,6 +292,11 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
[device unlockForConfiguration]; [device unlockForConfiguration];
} }
- (void)updatePictureSize
{
[self updateSessionPreset:self.pictureSize];
}
#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>) #if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
- (void)updateFaceDetecting:(id)faceDetecting - (void)updateFaceDetecting:(id)faceDetecting
{ {
@ -318,16 +331,24 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
[connection setVideoOrientation:orientation]; [connection setVideoOrientation:orientation];
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) { [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
if (imageSampleBuffer && !error) { if (imageSampleBuffer && !error) {
BOOL useFastMode = options[@"fastMode"] && [options[@"fastMode"] boolValue];
if (useFastMode) {
resolve(nil);
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer]; NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *takenImage = [UIImage imageWithData:imageData]; UIImage *takenImage = [UIImage imageWithData:imageData];
CGRect frame = [_previewLayer metadataOutputRectOfInterestForRect:self.frame];
CGImageRef takenCGImage = takenImage.CGImage; CGImageRef takenCGImage = takenImage.CGImage;
size_t width = CGImageGetWidth(takenCGImage); CGSize previewSize;
size_t height = CGImageGetHeight(takenCGImage); if (UIInterfaceOrientationIsPortrait([[UIApplication sharedApplication] statusBarOrientation])) {
CGRect cropRect = CGRectMake(frame.origin.x * width, frame.origin.y * height, frame.size.width * width, frame.size.height * height); previewSize = CGSizeMake(self.previewLayer.frame.size.height, self.previewLayer.frame.size.width);
takenImage = [RNImageUtils cropImage:takenImage toRect:cropRect]; } else {
previewSize = CGSizeMake(self.previewLayer.frame.size.width, self.previewLayer.frame.size.height);
}
CGRect cropRect = CGRectMake(0, 0, CGImageGetWidth(takenCGImage), CGImageGetHeight(takenCGImage));
CGRect croppedSize = AVMakeRectWithAspectRatioInsideRect(previewSize, cropRect);
takenImage = [RNImageUtils cropImage:takenImage toRect:croppedSize];
if ([options[@"mirrorImage"] boolValue]) { if ([options[@"mirrorImage"] boolValue]) {
takenImage = [RNImageUtils mirrorImage:takenImage]; takenImage = [RNImageUtils mirrorImage:takenImage];
@ -377,7 +398,11 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
[RNImageUtils updatePhotoMetadata:imageSampleBuffer withAdditionalData:@{ @"Orientation": @(imageRotation) } inResponse:response]; // TODO [RNImageUtils updatePhotoMetadata:imageSampleBuffer withAdditionalData:@{ @"Orientation": @(imageRotation) } inResponse:response]; // TODO
} }
if (useFastMode) {
[self onPictureSaved:@{@"data": response, @"id": options[@"id"]}];
} else {
resolve(response); resolve(response);
}
} else { } else {
reject(@"E_IMAGE_CAPTURE_FAILED", @"Image could not be captured", error); reject(@"E_IMAGE_CAPTURE_FAILED", @"Image could not be captured", error);
} }
@ -447,6 +472,16 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
[self.movieFileOutput stopRecording]; [self.movieFileOutput stopRecording];
} }
- (void)resumePreview
{
[[self.previewLayer connection] setEnabled:YES];
}
- (void)pausePreview
{
[[self.previewLayer connection] setEnabled:NO];
}
- (void)startSession - (void)startSession
{ {
#if TARGET_IPHONE_SIMULATOR #if TARGET_IPHONE_SIMULATOR
@ -571,10 +606,14 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
#pragma mark - internal #pragma mark - internal
- (void)updateSessionPreset:(NSString *)preset - (void)updateSessionPreset:(AVCaptureSessionPreset)preset
{ {
#if !(TARGET_IPHONE_SIMULATOR) #if !(TARGET_IPHONE_SIMULATOR)
if (preset) { if (preset) {
if (self.isDetectingFaces && [preset isEqual:AVCaptureSessionPresetPhoto]) {
RCTLog(@"AVCaptureSessionPresetPhoto not supported during face detection. Falling back to AVCaptureSessionPresetHigh");
preset = AVCaptureSessionPresetHigh;
}
dispatch_async(self.sessionQueue, ^{ dispatch_async(self.sessionQueue, ^{
[self.session beginConfiguration]; [self.session beginConfiguration];
if ([self.session canSetSessionPreset:preset]) { if ([self.session canSetSessionPreset:preset]) {

View File

@ -16,6 +16,7 @@ RCT_EXPORT_VIEW_PROPERTY(onCameraReady, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onMountError, RCTDirectEventBlock); RCT_EXPORT_VIEW_PROPERTY(onMountError, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onBarCodeRead, RCTDirectEventBlock); RCT_EXPORT_VIEW_PROPERTY(onBarCodeRead, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onFacesDetected, RCTDirectEventBlock); RCT_EXPORT_VIEW_PROPERTY(onFacesDetected, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onPictureSaved, RCTDirectEventBlock);
+ (BOOL)requiresMainQueueSetup + (BOOL)requiresMainQueueSetup
{ {
@ -71,7 +72,7 @@ RCT_EXPORT_VIEW_PROPERTY(onFacesDetected, RCTDirectEventBlock);
- (NSArray<NSString *> *)supportedEvents - (NSArray<NSString *> *)supportedEvents
{ {
return @[@"onCameraReady", @"onMountError", @"onBarCodeRead", @"onFacesDetected"]; return @[@"onCameraReady", @"onMountError", @"onBarCodeRead", @"onFacesDetected", @"onPictureSaved"];
} }
+ (NSDictionary *)validCodecTypes + (NSDictionary *)validCodecTypes
@ -111,6 +112,21 @@ RCT_EXPORT_VIEW_PROPERTY(onFacesDetected, RCTDirectEventBlock);
}; };
} }
+ (NSDictionary *)pictureSizes
{
return @{
@"3840x2160" : AVCaptureSessionPreset3840x2160,
@"1920x1080" : AVCaptureSessionPreset1920x1080,
@"1280x720" : AVCaptureSessionPreset1280x720,
@"640x480" : AVCaptureSessionPreset640x480,
@"352x288" : AVCaptureSessionPreset352x288,
@"Photo" : AVCaptureSessionPresetPhoto,
@"High" : AVCaptureSessionPresetHigh,
@"Medium" : AVCaptureSessionPresetMedium,
@"Low" : AVCaptureSessionPresetLow
};
}
+ (NSDictionary *)faceDetectorConstants + (NSDictionary *)faceDetectorConstants
{ {
#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>) #if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
@ -162,8 +178,16 @@ RCT_CUSTOM_VIEW_PROPERTY(whiteBalance, NSInteger, RNCamera)
[view updateWhiteBalance]; [view updateWhiteBalance];
} }
RCT_CUSTOM_VIEW_PROPERTY(pictureSize, NSString *, RNCamera)
{
[view setPictureSize:[[self class] pictureSizes][[RCTConvert NSString:json]]];
[view updatePictureSize];
}
RCT_CUSTOM_VIEW_PROPERTY(faceDetectorEnabled, BOOL, RNCamera) RCT_CUSTOM_VIEW_PROPERTY(faceDetectorEnabled, BOOL, RNCamera)
{ {
view.isDetectingFaces = [RCTConvert BOOL:json];
[view updateFaceDetecting:json]; [view updateFaceDetecting:json];
} }
@ -185,7 +209,7 @@ RCT_CUSTOM_VIEW_PROPERTY(faceDetectionClassifications, NSString, RNCamera)
RCT_CUSTOM_VIEW_PROPERTY(barCodeScannerEnabled, BOOL, RNCamera) RCT_CUSTOM_VIEW_PROPERTY(barCodeScannerEnabled, BOOL, RNCamera)
{ {
view.barCodeReading = [RCTConvert BOOL:json]; view.isReadingBarCodes = [RCTConvert BOOL:json];
[view setupOrDisableBarcodeScanner]; [view setupOrDisableBarcodeScanner];
} }
@ -200,11 +224,20 @@ RCT_REMAP_METHOD(takePicture,
resolver:(RCTPromiseResolveBlock)resolve resolver:(RCTPromiseResolveBlock)resolve
rejecter:(RCTPromiseRejectBlock)reject) rejecter:(RCTPromiseRejectBlock)reject)
{ {
[self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, RNCamera *> *viewRegistry) {
RNCamera *view = viewRegistry[reactTag];
if (![view isKindOfClass:[RNCamera class]]) {
RCTLogError(@"Invalid view returned from registry, expecting RNCamera, got: %@", view);
} else {
#if TARGET_IPHONE_SIMULATOR #if TARGET_IPHONE_SIMULATOR
NSMutableDictionary *response = [[NSMutableDictionary alloc] init]; NSMutableDictionary *response = [[NSMutableDictionary alloc] init];
float quality = [options[@"quality"] floatValue]; float quality = [options[@"quality"] floatValue];
NSString *path = [RNFileSystem generatePathInDirectory:[[RNFileSystem cacheDirectoryPath] stringByAppendingPathComponent:@"Camera"] withExtension:@".jpg"]; NSString *path = [RNFileSystem generatePathInDirectory:[[RNFileSystem cacheDirectoryPath] stringByAppendingPathComponent:@"Camera"] withExtension:@".jpg"];
UIImage *generatedPhoto = [RNImageUtils generatePhotoOfSize:CGSizeMake(200, 200)]; UIImage *generatedPhoto = [RNImageUtils generatePhotoOfSize:CGSizeMake(200, 200)];
BOOL useFastMode = options[@"fastMode"] && [options[@"fastMode"] boolValue];
if (useFastMode) {
resolve(nil);
}
NSData *photoData = UIImageJPEGRepresentation(generatedPhoto, quality); NSData *photoData = UIImageJPEGRepresentation(generatedPhoto, quality);
response[@"uri"] = [RNImageUtils writeImage:photoData toPath:path]; response[@"uri"] = [RNImageUtils writeImage:photoData toPath:path];
response[@"width"] = @(generatedPhoto.size.width); response[@"width"] = @(generatedPhoto.size.width);
@ -212,17 +245,16 @@ RCT_REMAP_METHOD(takePicture,
if ([options[@"base64"] boolValue]) { if ([options[@"base64"] boolValue]) {
response[@"base64"] = [photoData base64EncodedStringWithOptions:0]; response[@"base64"] = [photoData base64EncodedStringWithOptions:0];
} }
resolve(response); if (useFastMode) {
#else [view onPictureSaved:@{@"data": response, @"id": options[@"id"]}];
[self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, RNCamera *> *viewRegistry) {
RNCamera *view = viewRegistry[reactTag];
if (![view isKindOfClass:[RNCamera class]]) {
RCTLogError(@"Invalid view returned from registry, expecting RNCamera, got: %@", view);
} else { } else {
resolve(response);
}
#else
[view takePicture:options resolve:resolve reject:reject]; [view takePicture:options resolve:resolve reject:reject];
#endif
} }
}]; }];
#endif
} }
RCT_REMAP_METHOD(record, RCT_REMAP_METHOD(record,
@ -245,6 +277,36 @@ RCT_REMAP_METHOD(record,
}]; }];
} }
RCT_EXPORT_METHOD(resumePreview:(nonnull NSNumber *)reactTag)
{
#if TARGET_IPHONE_SIMULATOR
return;
#endif
[self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, RNCamera *> *viewRegistry) {
RNCamera *view = viewRegistry[reactTag];
if (![view isKindOfClass:[RNCamera class]]) {
RCTLogError(@"Invalid view returned from registry, expecting RNCamera, got: %@", view);
} else {
[view resumePreview];
}
}];
}
RCT_EXPORT_METHOD(pausePreview:(nonnull NSNumber *)reactTag)
{
#if TARGET_IPHONE_SIMULATOR
return;
#endif
[self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, RNCamera *> *viewRegistry) {
RNCamera *view = viewRegistry[reactTag];
if (![view isKindOfClass:[RNCamera class]]) {
RCTLogError(@"Invalid view returned from registry, expecting RNCamera, got: %@", view);
} else {
[view pausePreview];
}
}];
}
RCT_REMAP_METHOD(stopRecording, reactTag:(nonnull NSNumber *)reactTag) RCT_REMAP_METHOD(stopRecording, reactTag:(nonnull NSNumber *)reactTag)
{ {
[self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, RNCamera *> *viewRegistry) { [self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, RNCamera *> *viewRegistry) {
@ -283,5 +345,14 @@ RCT_EXPORT_METHOD(checkVideoAuthorizationStatus:(RCTPromiseResolveBlock)resolve
}]; }];
} }
RCT_REMAP_METHOD(getAvailablePictureSizes,
ratio:(NSString *)ratio
reactTag:(nonnull NSNumber *)reactTag
resolver:(RCTPromiseResolveBlock)resolve
rejecter:(RCTPromiseRejectBlock)reject)
{
resolve([[[self class] pictureSizes] allKeys]);
}
@end @end

View File

@ -30,7 +30,7 @@ const styles = StyleSheet.create({
}, },
}); });
type Orientation = "auto"|"landscapeLeft"|"landscapeRight"|"portrait"|"portraitUpsideDown"; type Orientation = 'auto' | 'landscapeLeft' | 'landscapeRight' | 'portrait' | 'portraitUpsideDown';
type PictureOptions = { type PictureOptions = {
quality?: number, quality?: number,
@ -82,6 +82,7 @@ type PropsType = typeof View.props & {
type?: number | string, type?: number | string,
onCameraReady?: Function, onCameraReady?: Function,
onBarCodeRead?: Function, onBarCodeRead?: Function,
onPictureSaved?: Function,
onGoogleVisionBarcodesDetected?: Function, onGoogleVisionBarcodesDetected?: Function,
faceDetectionMode?: number, faceDetectionMode?: number,
flashMode?: number | string, flashMode?: number | string,
@ -96,6 +97,7 @@ type PropsType = typeof View.props & {
captureAudio?: boolean, captureAudio?: boolean,
useCamera2Api?: boolean, useCamera2Api?: boolean,
playSoundOnCapture?: boolean, playSoundOnCapture?: boolean,
pictureSize?: string,
}; };
type StateType = { type StateType = {
@ -176,6 +178,7 @@ export default class Camera extends React.Component<PropsType, StateType> {
onMountError: PropTypes.func, onMountError: PropTypes.func,
onCameraReady: PropTypes.func, onCameraReady: PropTypes.func,
onBarCodeRead: PropTypes.func, onBarCodeRead: PropTypes.func,
onPictureSaved: PropTypes.func,
onGoogleVisionBarcodesDetected: PropTypes.func, onGoogleVisionBarcodesDetected: PropTypes.func,
onFacesDetected: PropTypes.func, onFacesDetected: PropTypes.func,
onTextRecognized: PropTypes.func, onTextRecognized: PropTypes.func,
@ -195,6 +198,7 @@ export default class Camera extends React.Component<PropsType, StateType> {
captureAudio: PropTypes.bool, captureAudio: PropTypes.bool,
useCamera2Api: PropTypes.bool, useCamera2Api: PropTypes.bool,
playSoundOnCapture: PropTypes.bool, playSoundOnCapture: PropTypes.bool,
pictureSize: PropTypes.string,
}; };
static defaultProps: Object = { static defaultProps: Object = {
@ -226,6 +230,7 @@ export default class Camera extends React.Component<PropsType, StateType> {
captureAudio: false, captureAudio: false,
useCamera2Api: false, useCamera2Api: false,
playSoundOnCapture: false, playSoundOnCapture: false,
pictureSize: 'Photo',
}; };
_cameraRef: ?Object; _cameraRef: ?Object;
@ -289,6 +294,12 @@ export default class Camera extends React.Component<PropsType, StateType> {
} }
}; };
_onPictureSaved = ({ nativeEvent }) => {
if (this.props.onPictureSaved) {
this.props.onPictureSaved(nativeEvent);
}
};
_onObjectDetected = (callback: ?Function) => ({ nativeEvent }: EventCallbackArgumentsType) => { _onObjectDetected = (callback: ?Function) => ({ nativeEvent }: EventCallbackArgumentsType) => {
const { type } = nativeEvent; const { type } = nativeEvent;
@ -363,6 +374,7 @@ export default class Camera extends React.Component<PropsType, StateType> {
onBarCodeRead={this._onObjectDetected(this.props.onBarCodeRead)} onBarCodeRead={this._onObjectDetected(this.props.onBarCodeRead)}
onFacesDetected={this._onObjectDetected(this.props.onFacesDetected)} onFacesDetected={this._onObjectDetected(this.props.onFacesDetected)}
onTextRecognized={this._onObjectDetected(this.props.onTextRecognized)} onTextRecognized={this._onObjectDetected(this.props.onTextRecognized)}
onPictureSaved={this._onPictureSaved}
> >
{this.renderChildren()} {this.renderChildren()}
</RNCamera> </RNCamera>
@ -427,6 +439,7 @@ const RNCamera = requireNativeComponent('RNCamera', Camera, {
onBarCodeRead: true, onBarCodeRead: true,
onGoogleVisionBarcodesDetected: true, onGoogleVisionBarcodesDetected: true,
onCameraReady: true, onCameraReady: true,
onPictureSaved: true,
onFaceDetected: true, onFaceDetected: true,
onLayout: true, onLayout: true,
onMountError: true, onMountError: true,