Merge pull request #270 from lwansbrough/v1

v1 Updates
This commit is contained in:
Zack Story 2016-04-27 20:34:46 -07:00
commit c3ca7cbb03
13 changed files with 532 additions and 249 deletions

View File

@ -6,11 +6,6 @@ A camera module for React Native.
![](https://i.imgur.com/5j2JdUk.gif)
## Known Issues
Below is a list of known issues. Pull requests are welcome for any of these issues!
- Android support is only available through GitHub at the moment. The npm package does not have a working Android implementation.
- Stills captured to disk will not be cleaned up and thus must be managed manually for now
## Getting started
### Mostly automatic install
1. `npm install rnpm --global`
@ -164,7 +159,7 @@ The `orientation` property allows you to specify the current orientation of the
Values: `true` (default) or `false`
This property allows you to specify whether a sound is played on capture
This property allows you to specify whether a sound is played on capture. It is currently Android-only, pending [a reasonable mute implementation](http://stackoverflow.com/questions/4401232/avfoundation-how-to-turn-off-the-shutter-sound-when-capturestillimageasynchrono) on iOS.
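For example, to capture without the shutter sound (a minimal sketch):

```js
<Camera ref="camera" playSoundOnCapture={false} style={{ flex: 1 }} />
```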
#### `iOS` `onBarCodeRead`
@ -190,6 +185,10 @@ The following barcode types can be recognised:
The barcode type is provided in the `data` object.
#### `iOS` `barCodeTypes`
An array of barcode types to search for. Defaults to all types listed above. Has no effect if `onBarCodeRead` is undefined.
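For example, to scan only QR codes (a minimal sketch; the event passed to `onBarCodeRead` is assumed to carry `type` and `data` fields, per the section above):

```js
import React from 'react';
import Camera from 'react-native-camera';

// Restrict scanning to QR codes instead of the full default list.
const QRScanner = () => (
  <Camera
    barCodeTypes={[Camera.constants.BarCodeType.qr]}
    onBarCodeRead={(e) => console.log('Barcode:', e.type, e.data)}
    style={{ flex: 1 }}
  />
);
```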
#### `flashMode`
Values:
@ -240,7 +239,7 @@ You can access component methods by adding a `ref` (ie. `ref="camera"`) prop to
#### `capture([options]): Promise`
Captures data from the camera. What is captured is based on the `captureMode` and `captureTarget` props. `captureMode` tells the camera whether you want a still image or video. `captureTarget` allows you to specify how you want the data to be captured and sent back to you. See `captureTarget` under Properties to see the available values. The promise will be fulfilled with the image data or file handle of the image on disk, depending on `target`.
Captures data from the camera. What is captured is based on the `captureMode` and `captureTarget` props. `captureMode` tells the camera whether you want a still image or video. `captureTarget` allows you to specify how you want the data to be captured and sent back to you. See `captureTarget` under Properties to see the available values.
Supported options:
@ -251,6 +250,15 @@ Supported options:
- `location` This is the object returned from `navigator.geolocation.getCurrentPosition()` (React Native's geolocation polyfill). It will add GPS metadata to the image.
- `rotation` This will rotate the image by the number of degrees specified.
The promise will be fulfilled with an object containing some of the following properties (see the sketch after this list):
- `data`: a base64-encoded string with the capture data (only returned with the deprecated `Camera.constants.CaptureTarget.memory` target)
- `path`: the path of the captured image or video file on disk
- `width`: (currently iOS video only) the video file's frame width
- `height`: (currently iOS video only) the video file's frame height
- `duration`: (currently iOS video only) the video's duration, in seconds
- `size`: (currently iOS video only) the video file's size, in bytes
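A minimal sketch of handling the object-shaped result (assuming a `<Camera ref="camera">` as described under component methods; the video-only properties appear only for iOS video captures):

```js
this.refs.camera.capture()
  .then((data) => {
    // For the disk, temp and cameraRoll targets the result carries the file path.
    console.log(data.path);
  })
  .catch((err) => console.error(err));
```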
#### `iOS` `getFOV(): Promise`
Returns the camera's current field of view.
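For example (a minimal sketch, assuming the same `ref` as above):

```js
this.refs.camera.getFOV().then((fov) => console.log('Field of view:', fov));
```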

View File

@ -224,6 +224,32 @@ public class RCTCamera {
}
}
public void adjustCameraRotationToDeviceOrientation(int type, int deviceOrientation)
{
Camera camera = _cameras.get(type);
if (null == camera) {
return;
}
CameraInfoWrapper cameraInfo = _cameraInfos.get(type);
int rotation;
int orientation = cameraInfo.info.orientation;
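// Combine the sensor's mounting orientation with the sampled device
// orientation (deviceOrientation is in 90-degree steps); the front-facing
// camera is mirrored, so the device rotation is applied with the opposite sign.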
if (cameraInfo.info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
rotation = (orientation + deviceOrientation * 90) % 360;
} else {
rotation = (orientation - deviceOrientation * 90 + 360) % 360;
}
cameraInfo.rotation = rotation;
Camera.Parameters parameters = camera.getParameters();
parameters.setRotation(cameraInfo.rotation);
try {
camera.setParameters(parameters);
} catch (Exception e) {
e.printStackTrace();
}
}
private void adjustPreviewLayout(int type) {
Camera camera = _cameras.get(type);
if (null == camera) {

View File

@ -53,10 +53,12 @@ public class RCTCameraModule extends ReactContextBaseJavaModule {
public static final int MEDIA_TYPE_VIDEO = 2;
private final ReactApplicationContext _reactContext;
private RCTSensorOrientationChecker _sensorOrientationChecker;
public RCTCameraModule(ReactApplicationContext reactContext) {
super(reactContext);
_reactContext = reactContext;
_sensorOrientationChecker = new RCTSensorOrientationChecker(_reactContext);
}
@Override
@ -70,6 +72,7 @@ public class RCTCameraModule extends ReactContextBaseJavaModule {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("Aspect", getAspectConstants());
put("BarCodeType", getBarCodeConstants());
put("Type", getTypeConstants());
put("CaptureQuality", getCaptureQualityConstants());
put("CaptureMode", getCaptureModeConstants());
@ -89,6 +92,14 @@ public class RCTCameraModule extends ReactContextBaseJavaModule {
});
}
private Map<String, Object> getBarCodeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
// @TODO add barcode types
}
});
}
private Map<String, Object> getTypeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
@ -165,27 +176,38 @@ public class RCTCameraModule extends ReactContextBaseJavaModule {
@ReactMethod
public void capture(final ReadableMap options, final Promise promise) {
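// One-shot orientation read: resume the sensor, wait for a single
// orientation event, then detach the listener and capture with the
// sampled device orientation.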
_sensorOrientationChecker.onResume();
_sensorOrientationChecker.registerOrientationListener(new RCTSensorOrientationListener() {
@Override
public void orientationEvent() {
int deviceOrientation = _sensorOrientationChecker.getOrientation();
_sensorOrientationChecker.unregisterOrientationListener();
_sensorOrientationChecker.onPause();
captureWithOrientation(options, promise, deviceOrientation);
}
});
}
public void captureWithOrientation(final ReadableMap options, final Promise promise, int deviceOrientation) {
Camera camera = RCTCamera.getInstance().acquireCameraInstance(options.getInt("type"));
if (null == camera) {
promise.reject("No camera found.");
return;
}
if (options.getBoolean("playSoundOnCapture")) {
MediaActionSound sound = new MediaActionSound();
sound.play(MediaActionSound.SHUTTER_CLICK);
}
RCTCamera.getInstance().adjustCameraRotationToDeviceOrientation(options.getInt("type"), deviceOrientation);
RCTCamera.getInstance().setCaptureQuality(options.getInt("type"), options.getString("quality"));
camera.takePicture(null, null, new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
camera.stopPreview();
camera.startPreview();
WritableMap response = new WritableNativeMap();
switch (options.getInt("target")) {
case RCT_CAMERA_CAPTURE_TARGET_MEMORY:
String encoded = Base64.encodeToString(data, Base64.DEFAULT);
promise.resolve(encoded);
response.putString("data", encoded);
promise.resolve(response);
break;
case RCT_CAMERA_CAPTURE_TARGET_CAMERA_ROLL:
BitmapFactory.Options bitmapOptions = new BitmapFactory.Options();
@ -194,7 +216,8 @@ public class RCTCameraModule extends ReactContextBaseJavaModule {
_reactContext.getContentResolver(),
bitmap, options.getString("title"),
options.getString("description"));
promise.resolve(url);
response.putString("path", url);
promise.resolve(response);
break;
case RCT_CAMERA_CAPTURE_TARGET_DISK:
File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
@ -212,7 +235,8 @@ public class RCTCameraModule extends ReactContextBaseJavaModule {
} catch (IOException e) {
promise.reject("Error accessing file: " + e.getMessage());
}
promise.resolve(Uri.fromFile(pictureFile).toString());
response.putString("path", Uri.fromFile(pictureFile).toString());
promise.resolve(response);
break;
case RCT_CAMERA_CAPTURE_TARGET_TEMP:
File tempFile = getTempMediaFile(MEDIA_TYPE_IMAGE);
@ -231,7 +255,8 @@ public class RCTCameraModule extends ReactContextBaseJavaModule {
} catch (IOException e) {
promise.reject("Error accessing file: " + e.getMessage());
}
promise.resolve(Uri.fromFile(tempFile).toString());
response.putString("path", Uri.fromFile(tempFile).toString());
promise.resolve(response);
break;
}
}

View File

@ -0,0 +1,84 @@
/**
* Created by rpopovici on 23/03/16.
*/
package com.lwansbrough.RCTCamera;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import com.facebook.react.bridge.ReactApplicationContext;
interface RCTSensorOrientationListener {
void orientationEvent();
}
public class RCTSensorOrientationChecker {
int mOrientation = 0;
private SensorEventListener mSensorEventListener;
private SensorManager mSensorManager;
private RCTSensorOrientationListener mListener = null;
public RCTSensorOrientationChecker(ReactApplicationContext reactContext) {
mSensorEventListener = new Listener();
mSensorManager = (SensorManager) reactContext.getSystemService(Context.SENSOR_SERVICE);
}
/**
* Call on activity onResume()
*/
public void onResume() {
mSensorManager.registerListener(mSensorEventListener, mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), SensorManager.SENSOR_DELAY_NORMAL);
}
/**
* Call on activity onPause()
*/
public void onPause() {
mSensorManager.unregisterListener(mSensorEventListener);
}
private class Listener implements SensorEventListener {
@Override
public void onSensorChanged(SensorEvent event) {
float x = event.values[0];
float y = event.values[1];
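// Map gravity (m/s^2, so roughly 9.8 on the dominant axis) to a coarse
// device orientation; values 0-3 correspond to 0/90/180/270 degrees and
// are multiplied by 90 in RCTCameraModule.captureWithOrientation.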
if (x<5 && x>-5 && y > 5)
mOrientation = 0;
else if (x<-5 && y<5 && y>-5)
mOrientation = 3;
else if (x<5 && x>-5 && y<-5)
mOrientation = 2;
else if (x>5 && y<5 && y>-5)
mOrientation = 1;
if (mListener != null) {
mListener.orientationEvent();
}
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
}
}
public int getOrientation() {
return mOrientation;
}
public void registerOrientationListener(RCTSensorOrientationListener listener) {
this.mListener = listener;
}
public void unregisterOrientationListener() {
mListener = null;
}
}

View File

@ -1,2 +0,0 @@
import Camera from './Camera';
export default Camera;

View File

@ -1,2 +0,0 @@
import Camera from './Camera';
export default Camera;

View File

@ -12,7 +12,7 @@ import {
const CameraManager = NativeModules.CameraManager || NativeModules.CameraModule;
const CAMERA_REF = 'camera';
function convertStringProps(props) {
function convertNativeProps(props) {
const newProps = { ...props };
if (typeof props.aspect === 'string') {
newProps.aspect = Camera.constants.Aspect[props.aspect];
@ -38,6 +38,15 @@ function convertStringProps(props) {
newProps.captureQuality = Camera.constants.CaptureQuality[props.captureQuality];
}
if (typeof props.captureMode === 'string') {
newProps.captureMode = Camera.constants.CaptureMode[props.captureMode];
}
// do not register barCodeTypes if no barcode listener
if (typeof props.onBarCodeRead !== 'function') {
newProps.barCodeTypes = [];
}
return newProps;
}
@ -84,6 +93,7 @@ export default class Camera extends Component {
onFocusChanged: PropTypes.func,
onZoomChanged: PropTypes.func,
mirrorImage: PropTypes.bool,
barCodeTypes: PropTypes.array,
orientation: PropTypes.oneOfType([
PropTypes.string,
PropTypes.number
@ -112,6 +122,7 @@ export default class Camera extends Component {
playSoundOnCapture: true,
torchMode: CameraManager.TorchMode.off,
mirrorImage: false,
barCodeTypes: Object.values(CameraManager.BarCodeType),
};
static checkDeviceAuthorizationStatus = CameraManager.checkDeviceAuthorizationStatus;
@ -131,9 +142,11 @@ export default class Camera extends Component {
}
async componentWillMount() {
this.cameraBarCodeReadListener = NativeAppEventEmitter.addListener('CameraBarCodeRead', this.props.onBarCodeRead);
this.cameraBarCodeReadListener = NativeAppEventEmitter.addListener('CameraBarCodeRead', this._onBarCodeRead);
let check = this.props.captureAudio ? Camera.checkDeviceAuthorizationStatus : Camera.checkVideoAuthorizationStatus;
let { captureMode } = convertNativeProps({captureMode: this.props.captureMode});
let hasVideoAndAudio = this.props.captureAudio && captureMode === Camera.constants.CaptureMode.video;
let check = hasVideoAndAudio ? Camera.checkDeviceAuthorizationStatus : Camera.checkVideoAuthorizationStatus;
if (check) {
const isAuthorized = await check();
@ -151,15 +164,21 @@ export default class Camera extends Component {
render() {
const style = [styles.base, this.props.style];
const nativeProps = convertStringProps(this.props);
const nativeProps = convertNativeProps(this.props);
return <RCTCamera ref={CAMERA_REF} {...nativeProps} />;
}
_onBarCodeRead = (data) => {
if (this.props.onBarCodeRead) this.props.onBarCodeRead(data);
};
capture(options) {
const props = convertStringProps(this.props);
const props = convertNativeProps(this.props);
options = {
audio: props.captureAudio,
barCodeTypes: props.barCodeTypes,
playSoundOnCapture: props.playSoundOnCapture,
mode: props.captureMode,
playSoundOnCapture: props.playSoundOnCapture,
target: props.captureTarget,
@ -192,7 +211,7 @@ export default class Camera extends Component {
hasFlash() {
if (Platform.OS === 'android') {
const props = convertStringProps(this.props);
const props = convertNativeProps(this.props);
return CameraManager.hasFlash({
type: props.type
});

View File

@ -27,35 +27,6 @@
BOOL _previousIdleTimerDisabled;
}
- (void)setAspect:(NSInteger)aspect
{
NSString *aspectString;
switch (aspect) {
default:
case RCTCameraAspectFill:
aspectString = AVLayerVideoGravityResizeAspectFill;
break;
case RCTCameraAspectFit:
aspectString = AVLayerVideoGravityResizeAspect;
break;
case RCTCameraAspectStretch:
aspectString = AVLayerVideoGravityResize;
break;
}
[self.manager changeAspect:aspectString];
}
- (void)setType:(NSInteger)type
{
if (self.manager.session.isRunning) {
[self.manager changeCamera:type];
}
else {
self.manager.presetCamera = type;
}
[self.manager initializeCaptureSessionInput:AVMediaTypeVideo];
}
- (void)setOrientation:(NSInteger)orientation
{
if (orientation == RCTCameraOrientationAuto) {
@ -68,21 +39,6 @@
}
}
- (void)setMirrorImage:(BOOL)mirrorImage
{
[self.manager changeMirrorImage:mirrorImage];
}
- (void)setFlashMode:(NSInteger)flashMode
{
[self.manager changeFlashMode:flashMode];
}
- (void)setTorchMode:(NSInteger)torchMode
{
[self.manager changeTorchMode:torchMode];
}
- (void)setOnFocusChanged:(BOOL)enabled
{
if (_onFocusChanged != enabled) {
@ -104,13 +60,6 @@
}
}
- (void)setKeepAwake:(BOOL)enabled
{
if (enabled) {
[UIApplication sharedApplication].idleTimerDisabled = true;
}
}
- (id)initWithManager:(RCTCameraManager*)manager bridge:(RCTBridge *)bridge
{

View File

@ -11,6 +11,7 @@
4107014D1ACB732B00C6AA39 /* RCTCamera.m in Sources */ = {isa = PBXBuildFile; fileRef = 410701481ACB732B00C6AA39 /* RCTCamera.m */; };
4107014E1ACB732B00C6AA39 /* RCTCameraManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 4107014A1ACB732B00C6AA39 /* RCTCameraManager.m */; };
454EBCF41B5082DC00AD0F86 /* NSMutableDictionary+ImageMetadata.m in Sources */ = {isa = PBXBuildFile; fileRef = 454EBCF31B5082DC00AD0F86 /* NSMutableDictionary+ImageMetadata.m */; };
9FE592B31CA3CBF500788287 /* RCTSensorOrientationChecker.m in Sources */ = {isa = PBXBuildFile; fileRef = 9FE592B21CA3CBF500788287 /* RCTSensorOrientationChecker.m */; };
/* End PBXBuildFile section */
/* Begin PBXCopyFilesBuildPhase section */
@ -34,6 +35,8 @@
410701491ACB732B00C6AA39 /* RCTCameraManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RCTCameraManager.h; sourceTree = "<group>"; };
4107014A1ACB732B00C6AA39 /* RCTCameraManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RCTCameraManager.m; sourceTree = "<group>"; };
454EBCF31B5082DC00AD0F86 /* NSMutableDictionary+ImageMetadata.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "NSMutableDictionary+ImageMetadata.m"; sourceTree = "<group>"; };
9FE592B11CA3CBF500788287 /* RCTSensorOrientationChecker.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RCTSensorOrientationChecker.h; sourceTree = "<group>"; };
9FE592B21CA3CBF500788287 /* RCTSensorOrientationChecker.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RCTSensorOrientationChecker.m; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@ -50,6 +53,8 @@
410701241ACB719800C6AA39 = {
isa = PBXGroup;
children = (
9FE592B11CA3CBF500788287 /* RCTSensorOrientationChecker.h */,
9FE592B21CA3CBF500788287 /* RCTSensorOrientationChecker.m */,
0314E39C1B661A460092D183 /* CameraFocusSquare.m */,
0314E39B1B661A0C0092D183 /* CameraFocusSquare.h */,
454EBCF31B5082DC00AD0F86 /* NSMutableDictionary+ImageMetadata.m */,
@ -128,6 +133,7 @@
454EBCF41B5082DC00AD0F86 /* NSMutableDictionary+ImageMetadata.m in Sources */,
4107014E1ACB732B00C6AA39 /* RCTCameraManager.m in Sources */,
4107014D1ACB732B00C6AA39 /* RCTCamera.m in Sources */,
9FE592B31CA3CBF500788287 /* RCTSensorOrientationChecker.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};

View File

@ -61,17 +61,13 @@ typedef NS_ENUM(NSInteger, RCTCameraTorchMode) {
@property (nonatomic, assign) NSInteger videoTarget;
@property (nonatomic, assign) NSInteger orientation;
@property (nonatomic, assign) BOOL mirrorImage;
@property (nonatomic, strong) NSArray* barCodeTypes;
@property (nonatomic, strong) RCTPromiseResolveBlock videoResolve;
@property (nonatomic, strong) RCTPromiseRejectBlock videoReject;
@property (nonatomic, strong) RCTCamera *camera;
- (void)changeAspect:(NSString *)aspect;
- (void)changeCamera:(NSInteger)camera;
- (void)changeOrientation:(NSInteger)orientation;
- (void)changeMirrorImage:(BOOL)mirrorImage;
- (void)changeFlashMode:(NSInteger)flashMode;
- (void)changeTorchMode:(NSInteger)torchMode;
- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position;
- (void)capture:(NSDictionary*)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject;
- (void)getFOV:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject;

View File

@ -9,6 +9,13 @@
#import <AssetsLibrary/ALAssetsLibrary.h>
#import <AVFoundation/AVFoundation.h>
#import <ImageIO/ImageIO.h>
#import "RCTSensorOrientationChecker.h"
@interface RCTCameraManager ()
@property (strong, nonatomic) RCTSensorOrientationChecker * sensorOrientationChecker;
@end
@implementation RCTCameraManager
@ -30,14 +37,6 @@ RCT_EXPORT_MODULE();
return [[RCTCamera alloc] initWithManager:self bridge:self.bridge];
}
RCT_EXPORT_VIEW_PROPERTY(aspect, NSInteger);
RCT_EXPORT_VIEW_PROPERTY(type, NSInteger);
RCT_EXPORT_VIEW_PROPERTY(orientation, NSInteger);
RCT_EXPORT_VIEW_PROPERTY(flashMode, NSInteger);
RCT_EXPORT_VIEW_PROPERTY(torchMode, NSInteger);
RCT_EXPORT_VIEW_PROPERTY(keepAwake, BOOL);
RCT_EXPORT_VIEW_PROPERTY(mirrorImage, BOOL);
- (NSDictionary *)constantsToExport
{
return @{
@ -47,26 +46,26 @@ RCT_EXPORT_VIEW_PROPERTY(mirrorImage, BOOL);
@"fill": @(RCTCameraAspectFill)
},
@"BarCodeType": @{
@"upce": AVMetadataObjectTypeUPCECode,
@"code39": AVMetadataObjectTypeCode39Code,
@"code39mod43": AVMetadataObjectTypeCode39Mod43Code,
@"ean13": AVMetadataObjectTypeEAN13Code,
@"ean8": AVMetadataObjectTypeEAN8Code,
@"code93": AVMetadataObjectTypeCode93Code,
@"code138": AVMetadataObjectTypeCode128Code,
@"pdf417": AVMetadataObjectTypePDF417Code,
@"qr": AVMetadataObjectTypeQRCode,
@"aztec": AVMetadataObjectTypeAztecCode
#ifdef AVMetadataObjectTypeInterleaved2of5Code
,@"interleaved2of5": AVMetadataObjectTypeInterleaved2of5Code
# endif
#ifdef AVMetadataObjectTypeITF14Code
,@"itf14": AVMetadataObjectTypeITF14Code
# endif
#ifdef AVMetadataObjectTypeDataMatrixCode
,@"datamatrix": AVMetadataObjectTypeDataMatrixCode
# endif
},
@"upce": AVMetadataObjectTypeUPCECode,
@"code39": AVMetadataObjectTypeCode39Code,
@"code39mod43": AVMetadataObjectTypeCode39Mod43Code,
@"ean13": AVMetadataObjectTypeEAN13Code,
@"ean8": AVMetadataObjectTypeEAN8Code,
@"code93": AVMetadataObjectTypeCode93Code,
@"code138": AVMetadataObjectTypeCode128Code,
@"pdf417": AVMetadataObjectTypePDF417Code,
@"qr": AVMetadataObjectTypeQRCode,
@"aztec": AVMetadataObjectTypeAztecCode
#ifdef AVMetadataObjectTypeInterleaved2of5Code
,@"interleaved2of5": AVMetadataObjectTypeInterleaved2of5Code
# endif
#ifdef AVMetadataObjectTypeITF14Code
,@"itf14": AVMetadataObjectTypeITF14Code
# endif
#ifdef AVMetadataObjectTypeDataMatrixCode
,@"datamatrix": AVMetadataObjectTypeDataMatrixCode
# endif
},
@"Type": @{
@"front": @(RCTCameraTypeFront),
@"back": @(RCTCameraTypeBack)
@ -111,33 +110,141 @@ RCT_EXPORT_VIEW_PROPERTY(mirrorImage, BOOL);
};
}
- (NSArray *)getBarCodeTypes {
return @[
AVMetadataObjectTypeUPCECode,
AVMetadataObjectTypeCode39Code,
AVMetadataObjectTypeCode39Mod43Code,
AVMetadataObjectTypeEAN13Code,
AVMetadataObjectTypeEAN8Code,
AVMetadataObjectTypeCode93Code,
AVMetadataObjectTypeCode128Code,
AVMetadataObjectTypePDF417Code,
AVMetadataObjectTypeQRCode,
AVMetadataObjectTypeAztecCode
#ifdef AVMetadataObjectTypeInterleaved2of5Code
,AVMetadataObjectTypeInterleaved2of5Code
# endif
#ifdef AVMetadataObjectTypeITF14Code
,AVMetadataObjectTypeITF14Code
# endif
#ifdef AVMetadataObjectTypeDataMatrixCode
,AVMetadataObjectTypeDataMatrixCode
# endif
];
RCT_EXPORT_VIEW_PROPERTY(orientation, NSInteger);
RCT_EXPORT_VIEW_PROPERTY(defaultOnFocusComponent, BOOL);
RCT_EXPORT_VIEW_PROPERTY(onFocusChanged, BOOL);
RCT_EXPORT_VIEW_PROPERTY(onZoomChanged, BOOL);
RCT_CUSTOM_VIEW_PROPERTY(aspect, NSInteger, RCTCamera) {
NSInteger aspect = [RCTConvert NSInteger:json];
NSString *aspectString;
switch (aspect) {
default:
case RCTCameraAspectFill:
aspectString = AVLayerVideoGravityResizeAspectFill;
break;
case RCTCameraAspectFit:
aspectString = AVLayerVideoGravityResizeAspect;
break;
case RCTCameraAspectStretch:
aspectString = AVLayerVideoGravityResize;
break;
}
self.previewLayer.videoGravity = aspectString;
}
RCT_EXPORT_VIEW_PROPERTY(defaultOnFocusComponent, BOOL);
RCT_EXPORT_VIEW_PROPERTY(onFocusChanged, BOOL)
RCT_EXPORT_VIEW_PROPERTY(onZoomChanged, BOOL)
RCT_CUSTOM_VIEW_PROPERTY(type, NSInteger, RCTCamera) {
NSInteger type = [RCTConvert NSInteger:json];
self.presetCamera = type;
if (self.session.isRunning) {
dispatch_async(self.sessionQueue, ^{
AVCaptureDevice *currentCaptureDevice = [self.videoCaptureDeviceInput device];
AVCaptureDevicePosition position = (AVCaptureDevicePosition)type;
AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:(AVCaptureDevicePosition)position];
if (captureDevice == nil) {
return;
}
self.presetCamera = type;
NSError *error = nil;
AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if (error || captureDeviceInput == nil)
{
NSLog(@"%@", error);
return;
}
[self.session beginConfiguration];
[self.session removeInput:self.videoCaptureDeviceInput];
if ([self.session canAddInput:captureDeviceInput])
{
[self.session addInput:captureDeviceInput];
[NSNotificationCenter.defaultCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentCaptureDevice];
[NSNotificationCenter.defaultCenter addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
self.videoCaptureDeviceInput = captureDeviceInput;
}
else
{
[self.session addInput:self.videoCaptureDeviceInput];
}
[self.session commitConfiguration];
});
}
[self initializeCaptureSessionInput:AVMediaTypeVideo];
}
RCT_CUSTOM_VIEW_PROPERTY(flashMode, NSInteger, RCTCamera) {
AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
NSError *error = nil;
NSInteger flashMode = [RCTConvert NSInteger:json];
if (![device hasFlash] || ![device isFlashModeSupported:flashMode]) return;
if (![device lockForConfiguration:&error]) {
NSLog(@"%@", error);
return;
}
[device setFlashMode:flashMode];
[device unlockForConfiguration];
}
RCT_CUSTOM_VIEW_PROPERTY(torchMode, NSInteger, RCTCamera) {
dispatch_async(self.sessionQueue, ^{
NSInteger torchMode = [RCTConvert NSInteger:json];
AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
NSError *error = nil;
if (![device hasTorch]) return;
if (![device lockForConfiguration:&error]) {
NSLog(@"%@", error);
return;
}
[device setTorchMode: torchMode];
[device unlockForConfiguration];
});
}
RCT_CUSTOM_VIEW_PROPERTY(keepAwake, BOOL, RCTCamera) {
BOOL enabled = [RCTConvert BOOL:json];
[UIApplication sharedApplication].idleTimerDisabled = enabled;
}
RCT_CUSTOM_VIEW_PROPERTY(mirrorImage, BOOL, RCTCamera) {
self.mirrorImage = [RCTConvert BOOL:json];
}
RCT_CUSTOM_VIEW_PROPERTY(barCodeTypes, NSArray, RCTCamera) {
self.barCodeTypes = [RCTConvert NSArray:json];
}
RCT_CUSTOM_VIEW_PROPERTY(captureAudio, BOOL, RCTCamera) {
RCTLog(@"capturing audio");
BOOL captureAudio = [RCTConvert BOOL:json];
if (captureAudio) {
[self initializeCaptureSessionInput:AVMediaTypeAudio];
}
}
- (NSArray *)customDirectEventTypes
{
@ -152,6 +259,8 @@ RCT_EXPORT_VIEW_PROPERTY(onZoomChanged, BOOL)
self.mirrorImage = false;
self.sessionQueue = dispatch_queue_create("cameraManagerQueue", DISPATCH_QUEUE_SERIAL);
self.sensorOrientationChecker = [RCTSensorOrientationChecker new];
}
return self;
}
@ -177,7 +286,7 @@ RCT_EXPORT_METHOD(checkDeviceAuthorizationStatus:(RCTPromiseResolveBlock)resolve
RCT_EXPORT_METHOD(checkVideoAuthorizationStatus:(RCTPromiseResolveBlock)resolve
reject:(__unused RCTPromiseRejectBlock)reject) {
__block NSString *mediaType = AVMediaTypeVideo;
[AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
resolve(@(granted));
}];
@ -192,67 +301,6 @@ RCT_EXPORT_METHOD(checkAudioAuthorizationStatus:(RCTPromiseResolveBlock)resolve
}];
}
RCT_EXPORT_METHOD(changeCamera:(NSInteger)camera) {
dispatch_async(self.sessionQueue, ^{
AVCaptureDevice *currentCaptureDevice = [self.videoCaptureDeviceInput device];
AVCaptureDevicePosition position = (AVCaptureDevicePosition)camera;
AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:(AVCaptureDevicePosition)position];
if (captureDevice == nil) {
return;
}
self.presetCamera = camera;
NSError *error = nil;
AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if (error || captureDeviceInput == nil)
{
NSLog(@"%@", error);
return;
}
[self.session beginConfiguration];
[self.session removeInput:self.videoCaptureDeviceInput];
if ([self.session canAddInput:captureDeviceInput])
{
[self.session addInput:captureDeviceInput];
[NSNotificationCenter.defaultCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentCaptureDevice];
[NSNotificationCenter.defaultCenter addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
self.videoCaptureDeviceInput = captureDeviceInput;
}
else
{
[self.session addInput:self.videoCaptureDeviceInput];
}
[self.session commitConfiguration];
});
}
RCT_EXPORT_METHOD(changeAspect:(NSString *)aspect) {
self.previewLayer.videoGravity = aspect;
}
RCT_EXPORT_METHOD(changeFlashMode:(NSInteger)flashMode) {
AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
NSError *error = nil;
if (![device hasFlash]) return;
if (![device lockForConfiguration:&error]) {
NSLog(@"%@", error);
return;
}
[self setFlashMode:flashMode forDevice:device];
[device unlockForConfiguration];
}
RCT_EXPORT_METHOD(changeOrientation:(NSInteger)orientation) {
[self setOrientation:orientation];
if (self.previewLayer.connection.isVideoOrientationSupported) {
@ -260,23 +308,6 @@ RCT_EXPORT_METHOD(changeOrientation:(NSInteger)orientation) {
}
}
RCT_EXPORT_METHOD(changeMirrorImage:(BOOL)mirrorImage) {
self.mirrorImage = mirrorImage;
}
RCT_EXPORT_METHOD(changeTorchMode:(NSInteger)torchMode) {
AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
NSError *error = nil;
if (![device hasTorch]) return;
if (![device lockForConfiguration:&error]) {
NSLog(@"%@", error);
return;
}
[device setTorchMode: torchMode];
[device unlockForConfiguration];
}
RCT_EXPORT_METHOD(capture:(NSDictionary *)options
resolve:(RCTPromiseResolveBlock)resolve
reject:(RCTPromiseRejectBlock)reject) {
@ -364,7 +395,7 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
if ([self.session canAddOutput:metadataOutput]) {
[metadataOutput setMetadataObjectsDelegate:self queue:self.sessionQueue];
[self.session addOutput:metadataOutput];
[metadataOutput setMetadataObjectTypes:metadataOutput.availableMetadataObjectTypes];
[metadataOutput setMetadataObjectTypes:self.barCodeTypes];
self.metadataOutput = metadataOutput;
}
@ -401,7 +432,15 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
- (void)initializeCaptureSessionInput:(NSString *)type {
dispatch_async(self.sessionQueue, ^{
if (type == AVMediaTypeAudio) {
for (AVCaptureDeviceInput* input in [self.session inputs]) {
if ([input.device hasMediaType:AVMediaTypeAudio]) {
// If an audio input has been configured we don't need to set it up again
return;
}
}
}
[self.session beginConfiguration];
NSError *error = nil;
@ -425,10 +464,7 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
return;
}
if (type == AVMediaTypeAudio) {
[self.session removeInput:self.audioCaptureDeviceInput];
}
else if (type == AVMediaTypeVideo) {
if (type == AVMediaTypeVideo) {
[self.session removeInput:self.videoCaptureDeviceInput];
}
@ -477,7 +513,8 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
NSData *imageData = UIImageJPEGRepresentation(image, 1.0);
[self saveImage:imageData target:target metadata:nil resolve:resolve reject:reject];
#else
[[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:self.previewLayer.connection.videoOrientation];
[self.sensorOrientationChecker getDeviceOrientationWithBlock:^(UIInterfaceOrientation orientation) {
[[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:[self.sensorOrientationChecker convertToAVCaptureVideoOrientation: orientation]];
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
@ -540,6 +577,7 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
}
}];
}];
#endif
});
}
@ -549,7 +587,8 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
NSString *responseString;
if (target == RCTCameraCaptureTargetMemory) {
responseString = [imageData base64EncodedStringWithOptions:0];
resolve(@{@"data":[imageData base64EncodedStringWithOptions:0]});
return;
}
else if (target == RCTCameraCaptureTargetDisk) {
@ -574,7 +613,7 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
else if (target == RCTCameraCaptureTargetCameraRoll) {
[[[ALAssetsLibrary alloc] init] writeImageDataToSavedPhotosAlbum:imageData metadata:metadata completionBlock:^(NSURL* url, NSError* error) {
if (error == nil) {
resolve([url absoluteString]);
resolve(@{@"path":[url absoluteString]});
}
else {
reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
@ -582,7 +621,7 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
}];
return;
}
resolve(responseString);
resolve(@{@"path":responseString});
}
- (CGImageRef)newCGImageRotatedByAngle:(CGImageRef)imgRef angle:(CGFloat)angle
@ -627,7 +666,7 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
return;
}
if ([options valueForKey:@"audio"]) {
if ([[options valueForKey:@"audio"] boolValue]) {
[self initializeCaptureSessionInput:AVMediaTypeAudio];
}
@ -667,7 +706,6 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
BOOL recordSuccess = YES;
if ([error code] != noErr) {
// A problem occurred: Find out if the recording was successful.
@ -681,6 +719,31 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
return;
}
AVURLAsset* videoAsAsset = [AVURLAsset URLAssetWithURL:outputFileURL options:nil];
AVAssetTrack* videoTrack = [[videoAsAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
float videoWidth;
float videoHeight;
CGSize videoSize = [videoTrack naturalSize];
CGAffineTransform txf = [videoTrack preferredTransform];
if ((txf.tx == videoSize.width && txf.ty == videoSize.height) || (txf.tx == 0 && txf.ty == 0)) {
// Video recorded in landscape orientation
videoWidth = videoSize.width;
videoHeight = videoSize.height;
} else {
// Video recorded in portrait orientation, so have to swap reported width/height
videoWidth = videoSize.height;
videoHeight = videoSize.width;
}
NSMutableDictionary *videoInfo = [NSMutableDictionary dictionaryWithDictionary:@{
@"duration":[NSNumber numberWithFloat:CMTimeGetSeconds(videoAsAsset.duration)],
@"width":[NSNumber numberWithFloat:videoWidth],
@"height":[NSNumber numberWithFloat:videoHeight],
@"size":[NSNumber numberWithLongLong:captureOutput.recordedFileSize],
}];
if (self.videoTarget == RCTCameraCaptureTargetCameraRoll) {
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL]) {
@ -690,7 +753,8 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
return;
}
self.videoResolve([assetURL absoluteString]);
[videoInfo setObject:[assetURL absoluteString] forKey:@"path"];
self.videoResolve(videoInfo);
}];
}
}
@ -707,7 +771,8 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
return;
}
self.videoResolve(fullPath);
[videoInfo setObject:fullPath forKey:@"path"];
self.videoResolve(videoInfo);
}
else if (self.videoTarget == RCTCameraCaptureTargetTemp) {
NSString *fileName = [[NSProcessInfo processInfo] globallyUniqueString];
@ -721,7 +786,8 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
return;
}
self.videoResolve(fullPath);
[videoInfo setObject:fullPath forKey:@"path"];
self.videoResolve(videoInfo);
}
else {
self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(@"Target not supported"));
@ -731,8 +797,8 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
for (AVMetadataMachineReadableCodeObject *metadata in metadataObjects) {
for (id barcodeType in [self getBarCodeTypes]) {
if (metadata.type == barcodeType) {
for (id barcodeType in self.barCodeTypes) {
if ([metadata.type isEqualToString:barcodeType]) {
// Transform the meta-data coordinates to screen coords
AVMetadataMachineReadableCodeObject *transformed = (AVMetadataMachineReadableCodeObject *)[_previewLayer transformedMetadataObjectForMetadataObject:metadata];
@ -775,24 +841,6 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
return captureDevice;
}
- (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device
{
if (device.hasFlash && [device isFlashModeSupported:flashMode])
{
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
[device setFlashMode:flashMode];
[device unlockForConfiguration];
}
else
{
NSLog(@"%@", error);
}
}
}
- (void)subjectAreaDidChange:(NSNotification *)notification
{
CGPoint devicePoint = CGPointMake(.5, .5);

View File

@ -0,0 +1,21 @@
//
// RCTSensorOrientationChecker.h
// RCTCamera
//
// Created by Radu Popovici on 24/03/16.
//
//
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
typedef void (^RCTSensorCallback) (UIInterfaceOrientation orientation);
@interface RCTSensorOrientationChecker : NSObject
@property (assign, nonatomic) UIInterfaceOrientation orientation;
- (void)getDeviceOrientationWithBlock:(RCTSensorCallback)callback;
- (AVCaptureVideoOrientation)convertToAVCaptureVideoOrientation:(UIInterfaceOrientation)orientation;
@end

View File

@ -0,0 +1,105 @@
//
// RCTSensorOrientationChecker.m
// RCTCamera
//
// Created by Radu Popovici on 24/03/16.
//
//
#import "RCTSensorOrientationChecker.h"
#import <CoreMotion/CoreMotion.h>
@interface RCTSensorOrientationChecker ()
@property (strong, nonatomic) CMMotionManager * motionManager;
@property (strong, nonatomic) RCTSensorCallback orientationCallback;
@end
@implementation RCTSensorOrientationChecker
- (instancetype)init
{
self = [super init];
if (self) {
// Initialization code
self.motionManager = [[CMMotionManager alloc] init];
self.motionManager.accelerometerUpdateInterval = 0.2;
self.motionManager.gyroUpdateInterval = 0.2;
self.orientationCallback = nil;
}
return self;
}
- (void)dealloc
{
[self pause];
}
- (void)resume
{
__weak __typeof(self) weakSelf = self;
[self.motionManager startAccelerometerUpdatesToQueue:[NSOperationQueue new]
withHandler:^(CMAccelerometerData *accelerometerData, NSError *error) {
if (!error) {
weakSelf.orientation = [weakSelf getOrientationBy:accelerometerData.acceleration];
}
if (weakSelf.orientationCallback) {
weakSelf.orientationCallback(weakSelf.orientation);
}
}];
}
- (void)pause
{
[self.motionManager stopAccelerometerUpdates];
}
- (void)getDeviceOrientationWithBlock:(RCTSensorCallback)callback
{
__weak __typeof(self) weakSelf = self;
self.orientationCallback = ^(UIInterfaceOrientation orientation) {
if (callback) {
callback(orientation);
}
weakSelf.orientationCallback = nil;
[weakSelf pause];
};
[self resume];
}
- (UIInterfaceOrientation)getOrientationBy:(CMAcceleration)acceleration
{
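// Gravity heuristic (CMAcceleration is in g): a dominant x component means
// landscape, a dominant y component means portrait; fall back to the status
// bar orientation when no axis clearly dominates.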
if(acceleration.x >= 0.75) {
return UIInterfaceOrientationLandscapeLeft;
}
if(acceleration.x <= -0.75) {
return UIInterfaceOrientationLandscapeRight;
}
if(acceleration.y <= -0.75) {
return UIInterfaceOrientationPortrait;
}
if(acceleration.y >= 0.75) {
return UIInterfaceOrientationPortraitUpsideDown;
}
return [[UIApplication sharedApplication] statusBarOrientation];
}
- (AVCaptureVideoOrientation)convertToAVCaptureVideoOrientation:(UIInterfaceOrientation)orientation
{
switch (orientation) {
case UIInterfaceOrientationPortrait:
return AVCaptureVideoOrientationPortrait;
case UIInterfaceOrientationPortraitUpsideDown:
return AVCaptureVideoOrientationPortraitUpsideDown;
case UIInterfaceOrientationLandscapeLeft:
return AVCaptureVideoOrientationLandscapeLeft;
case UIInterfaceOrientationLandscapeRight:
return AVCaptureVideoOrientationLandscapeRight;
default:
return AVCaptureVideoOrientationPortrait; // UIInterfaceOrientationUnknown; the AV enum has no "unknown" value
}
}
@end