add flash and change-camera support

Ran Greenberg 2016-06-02 14:08:06 +03:00
parent e80bdc8a25
commit 838a3a024a
11 changed files with 576 additions and 124 deletions

View File

@@ -5,41 +5,48 @@ import {
Text,
View,
ListView,
TouchableOpacity
TouchableOpacity,
Image,
} from 'react-native';
import {CameraKitCamera} from 'react-native-camera-kit';
import _ from 'lodash';
import Immutable from 'seamless-immutable';
import {
CameraKitGallery,
CameraKitCamera,
} from 'react-native-camera-kit';
const FLASH_MODE_AUTO = "auto";
const FLASH_MODE_ON = "on";
const FLASH_MODE_OFF = "off";
class example extends Component {
constructor(props) {
super(props);
const ds = new ListView.DataSource({rowHasChanged: (r1, r2) => r1 !== r2});
this.state = {
albumsName: (new ListView.DataSource({rowHasChanged: (r1, r2) => r1 !== r2})),
albums:[],
albumsDS: ds,
shouldOpenCamera: false,
shouldShowListView: false,
image:{imageURI:""},
flashMode:FLASH_MODE_AUTO
}
}
render() {
//console.error(CameraKitCamera);
return (
<View style={styles.container}>
<TouchableOpacity style={{marginTop: 10}} onPress={this.onGetAlbumsPressed.bind(this)}>
<TouchableOpacity style={{marginTop: 10, backgroundColor: 'black'}} onPress={this.onGetAlbumsPressed.bind(this)}>
<Text style={styles.button}>get albums</Text>
</TouchableOpacity>
<ListView
dataSource={this.state.albumsName}
renderRow={(rowData) => <Text>{rowData}</Text>}
style={styles.listView}
/>
<TouchableOpacity style={{marginTop: 10}} onPress={this.onOpenCameraPressed.bind(this)}>
<Text style={styles.button}>{this.state.shouldOpenCamera ? "close camera" : "open camera"}</Text>
</TouchableOpacity>
{this._renderListView()}
{this._renderCameraView()}
</View>
@@ -47,39 +54,133 @@ class example extends Component {
);
}
_renderListView() {
if (this.state.shouldShowListView) {
return(
<ListView
style={styles.listView}
dataSource={this.state.albumsDS}
renderRow={(rowData) =>
this._renderRow(rowData)
}
/>
)
}
}
_renderRow(rowData) {
//console.log('ran', rowData);
return (
<View style={styles.row}>
<Text style={styles.text}>
{rowData}
</Text>
</View>
<View style={{backgroundColor: 'green'}}>
<Image
style={{flex:1}}
source={{uri: rowData.image, scale: 3}}
/>
<TouchableOpacity style={{marginTop: 10}} onPress={this.onAlbumNamePressed.bind(this, rowData.albumName)}>
<Text style={{fontSize: 18}}>{rowData.albumName}</Text>
</TouchableOpacity>
</View>
)
}
async onGetAlbumsPressed() {
const albumsNames = await CameraKitGallery.getAlbums();
this.setState({albumsName:this.state.albumsName.cloneWithRows(albumsNames)});
const albums = await CameraKitGallery.getAlbums();
const albumsNames = _.map(albums, 'albumName');
const albumsThumbnails = _.map(albums, 'albumName');
this.setState({...this.state, albumsDS:this.state.albumsDS.cloneWithRows(albums), albums:albums, shouldShowListView: true});
}
async onAlbumNamePressed(albumName) {
let base64Image = await CameraKitGallery.getThumbnailForAlbumName(albumName);
let newAlbums = _.uniq(this.state.albums);
base64Image = 'data:image/png;base64,' + base64Image;
let album = _.find(newAlbums, function(o) {
return o.albumName === albumName;
});
const albumIndex = _.indexOf(newAlbums, album);
album = {...album, image:base64Image };
newAlbums[albumIndex] = album;
this.setState({albumsDS:this.state.albumsDS.cloneWithRows(newAlbums), albums:newAlbums});
}
_renderCameraView() {
if (this.state.shouldOpenCamera) {
return (
<CameraKitCamera style={{margin:8, width: 300, height: 300}}/>
<View style={{ flex:1, backgroundColor: 'gray', marginHorizontal: 8, marginBottom:8}}>
<TouchableOpacity style={{flex: 1, flexDirection:'row'}} onPress={this.onTakeIt.bind(this)}>
<CameraKitCamera
ref={(cam) => {
this.camera = cam;
}}
style={{flex: 1}}
/>
</TouchableOpacity>
<View style={{flexDirection: 'row'}}>
<Image
style={{ flexDirection:'row', backgroundColor: 'gray', width: 100, height: 100}}
source={{uri: this.state.image.imageURI, scale: 3}}
/>
<TouchableOpacity style={{alignSelf:'center', marginHorizontal: 4}} onPress={this.onSwitchCameraPressed.bind(this)}>
<Text>switch camera</Text>
</TouchableOpacity>
<View style={{ flexDirection:'column', justifyContent: 'space-between'}}>
<TouchableOpacity style={{ marginHorizontal: 4}} onPress={this.onSetFlash.bind(this, FLASH_MODE_AUTO)}>
<Text>flash auto</Text>
</TouchableOpacity>
<TouchableOpacity style={{ marginHorizontal: 4, }} onPress={this.onSetFlash.bind(this, FLASH_MODE_ON)}>
<Text>flash on</Text>
</TouchableOpacity>
<TouchableOpacity style={{ marginHorizontal: 4,}} onPress={this.onSetFlash.bind(this, FLASH_MODE_OFF)}>
<Text>flash off</Text>
</TouchableOpacity>
</View>
</View>
</View>
)
}
}
async onSwitchCameraPressed() {
const success = await this.camera.changeCamera();
}
async onSetFlash(flashMode) {
const success = await this.camera.setFlashMode(flashMode);
}
async onTakeIt() {
const imageURI = await this.camera.capture(false);
let newImage = {imageURI: imageURI};
this.setState({...this.state, image:newImage});
}
onOpenCameraPressed() {
this.setState({shouldOpenCamera:!this.state.shouldOpenCamera});
}
}
const styles = StyleSheet.create({
container: {
flex: 1,
justifyContent: 'center',
alignItems: 'center',
//justifyContent: 'center',
//alignItems: 'center',
backgroundColor: '#F5FCFF',
marginTop: 20
},
@@ -103,14 +204,16 @@ const styles = StyleSheet.create({
flex: 1,
},
button: {
fontSize: 20
fontSize: 18,
alignSelf: 'center',
backgroundColor: 'green'
},
listView: {
flex: 1,
flexDirection:'column',
//flex:1,
//flexDirection:'column',
margin: 8,
backgroundColor: '#D6DAC2',
alignSelf: 'stretch'
//alignSelf: 'stretch'
},
});
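A side note on onAlbumNamePressed above: it mutates newAlbums in place before handing it to setState. A minimal non-mutating sketch of the same thumbnail merge, assuming only the albumName/image fields used in this example (mergeThumbnail is a hypothetical helper, not part of this commit):

// Hypothetical helper: returns a fresh albums array with the fetched
// thumbnail merged into the matching entry, leaving the old array untouched.
function mergeThumbnail(albums, albumName, base64Image) {
  const image = 'data:image/png;base64,' + base64Image;
  return albums.map((album) =>
    album.albumName === albumName ? {...album, image} : album
  );
}

// Usage inside onAlbumNamePressed (sketch):
// const newAlbums = mergeThumbnail(this.state.albums, albumName, base64Image);
// this.setState({albumsDS: this.state.albumsDS.cloneWithRows(newAlbums), albums: newAlbums});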

View File

@@ -31,7 +31,7 @@
* on the same Wi-Fi network.
*/
jsCodeLocation = [NSURL URLWithString:@"http://172.31.9.103:8081/index.ios.bundle?platform=ios&dev=true"];
jsCodeLocation = [NSURL URLWithString:@"http://172.31.8.209:8081/index.ios.bundle?platform=ios&dev=true"];
/**
* OPTION 2

View File

@@ -6,8 +6,9 @@
"start": "node node_modules/react-native/local-cli/cli.js start"
},
"dependencies": {
"react-native": "0.25.1",
"react": "0.14.5",
"react-native-camera-kit": "latest"
"react-native": "0.25.1",
"react-native-camera-kit": "latest",
"seamless-immutable": "^6.1.0"
}
}

View File

@@ -8,7 +8,6 @@
/* Begin PBXBuildFile section */
26550AE61CFC2437007FF2DF /* CKGalleryManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 26550AE51CFC2437007FF2DF /* CKGalleryManager.m */; };
26550AE81CFC2BCF007FF2DF /* Photos.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 26550AE71CFC2BCF007FF2DF /* Photos.framework */; };
26550AF61CFC7086007FF2DF /* CKCameraManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 26550AF51CFC7086007FF2DF /* CKCameraManager.m */; };
2685AA241CFD89A300E4A446 /* CKCamera.m in Sources */ = {isa = PBXBuildFile; fileRef = 2685AA231CFD89A300E4A446 /* CKCamera.m */; };
/* End PBXBuildFile section */
@@ -29,7 +28,6 @@
2646934E1CFB2A6B00F3A740 /* libReactNativeCameraKit.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libReactNativeCameraKit.a; sourceTree = BUILT_PRODUCTS_DIR; };
26550AE41CFC2437007FF2DF /* CKGalleryManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CKGalleryManager.h; sourceTree = "<group>"; };
26550AE51CFC2437007FF2DF /* CKGalleryManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CKGalleryManager.m; sourceTree = "<group>"; };
26550AE71CFC2BCF007FF2DF /* Photos.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Photos.framework; path = System/Library/Frameworks/Photos.framework; sourceTree = SDKROOT; };
26550AF41CFC7086007FF2DF /* CKCameraManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CKCameraManager.h; sourceTree = "<group>"; };
26550AF51CFC7086007FF2DF /* CKCameraManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CKCameraManager.m; sourceTree = "<group>"; };
2685AA221CFD89A300E4A446 /* CKCamera.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CKCamera.h; sourceTree = "<group>"; };
@@ -41,7 +39,6 @@
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
26550AE81CFC2BCF007FF2DF /* Photos.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@@ -51,7 +48,6 @@
264693451CFB2A6B00F3A740 = {
isa = PBXGroup;
children = (
26550AE71CFC2BCF007FF2DF /* Photos.framework */,
264693501CFB2A6B00F3A740 /* ReactNativeCameraKit */,
2646934F1CFB2A6B00F3A740 /* Products */,
);

View File

@@ -7,12 +7,20 @@
//
#import <UIKit/UIKit.h>
@import AVFoundation;
typedef void (^CaptureBlock)(NSString *imagePath);
typedef void (^CallbackBlock)(BOOL success);
@interface CKCamera : UIView
@property (nonatomic, readonly) AVCaptureDeviceInput *videoDeviceInput;
// api
- (void)snapStillImage:(BOOL)shouldSaveToCameraRoll success:(CaptureBlock)block;
- (void)changeCamera:(CallbackBlock)block;
- (void)setFlashMode:(AVCaptureFlashMode)flashMode callback:(CallbackBlock)block;
@end

View File

@@ -6,10 +6,12 @@
// Copyright © 2016 Wix. All rights reserved.
//
@import Foundation;
@import Photos;
#import "CKCamera.h"
#import "UIView+React.h"
#import "RCTConvert.h"
#import <AVFoundation/AVFoundation.h>
static void * CapturingStillImageContext = &CapturingStillImageContext;
static void * SessionRunningContext = &SessionRunningContext;
@@ -28,7 +30,7 @@ typedef NS_ENUM( NSInteger, CKSetupResult ) {
// Session management.
@property (nonatomic) dispatch_queue_t sessionQueue;
@property (nonatomic) AVCaptureSession *session;
@property (nonatomic) AVCaptureDeviceInput *videoDeviceInput;
@property (nonatomic, readwrite) AVCaptureDeviceInput *videoDeviceInput;
@property (nonatomic) AVCaptureMovieFileOutput *movieFileOutput;
@property (nonatomic) AVCaptureStillImageOutput *stillImageOutput;
@@ -37,15 +39,17 @@ typedef NS_ENUM( NSInteger, CKSetupResult ) {
@property (nonatomic, getter=isSessionRunning) BOOL sessionRunning;
@property (nonatomic) UIBackgroundTaskIdentifier backgroundRecordingID;
@end
@implementation CKCamera
#pragma mark - initialization
- (instancetype)initWithFrame:(CGRect)frame {
self = [super initWithFrame:frame];
if (self){
// Create the AVCaptureSession.
self.session = [[AVCaptureSession alloc] init];
@@ -54,17 +58,19 @@ typedef NS_ENUM( NSInteger, CKSetupResult ) {
[self handleCameraPermission];
[self setupCaptureSession];
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
[self.layer addSublayer:self.previewLayer];
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
}
return self;
}
-(void)setupCaptureSession {
// Setup the capture session.
// In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
@@ -91,6 +97,7 @@ typedef NS_ENUM( NSInteger, CKSetupResult ) {
if ( [self.session canAddInput:videoDeviceInput] ) {
[self.session addInput:videoDeviceInput];
self.videoDeviceInput = videoDeviceInput;
[CKCamera setFlashMode:AVCaptureFlashModeAuto forDevice:self.videoDeviceInput.device];
}
else {
NSLog( @"Could not add video device input to the session" );
@@ -170,45 +177,243 @@ typedef NS_ENUM( NSInteger, CKSetupResult ) {
case CKSetupResultSuccess:
{
// Only setup observers and start the session running if setup succeeded.
[self addObservers];
// [self addObservers];
[self.session startRunning];
self.sessionRunning = self.session.isRunning;
break;
}
case CKSetupResultCameraNotAuthorized:
{
// dispatch_async( dispatch_get_main_queue(), ^{
// NSString *message = NSLocalizedString( @"AVCam doesn't have permission to use the camera, please change privacy settings", @"Alert message when the user has denied access to the camera" );
// UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"AVCam" message:message preferredStyle:UIAlertControllerStyleAlert];
// UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil];
// [alertController addAction:cancelAction];
// // Provide quick access to Settings.
// UIAlertAction *settingsAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"Settings", @"Alert button to open Settings" ) style:UIAlertActionStyleDefault handler:^( UIAlertAction *action ) {
// [[UIApplication sharedApplication] openURL:[NSURL URLWithString:UIApplicationOpenSettingsURLString]];
// }];
// [alertController addAction:settingsAction];
// [self presentViewController:alertController animated:YES completion:nil];
// } );
break;
}
case CKSetupResultSessionConfigurationFailed:
{
// dispatch_async( dispatch_get_main_queue(), ^{
// NSString *message = NSLocalizedString( @"Unable to capture media", @"Alert message when something goes wrong during capture session configuration" );
// UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"AVCam" message:message preferredStyle:UIAlertControllerStyleAlert];
// UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil];
// [alertController addAction:cancelAction];
// [self presentViewController:alertController animated:YES completion:nil];
// } );
break;
}
}
} );
}
+ (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
AVCaptureDevice *captureDevice = devices.firstObject;
for ( AVCaptureDevice *device in devices ) {
if ( device.position == position ) {
captureDevice = device;
break;
}
}
return captureDevice;
}
- (void)setFlashMode:(AVCaptureFlashMode)flashMode callback:(CallbackBlock)block {
[CKCamera setFlashMode:flashMode forDevice:self.videoDeviceInput.device];
if (block) {
block(self.videoDeviceInput.device.flashMode == flashMode);
}
}
+ (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device
{
if ( device.hasFlash && [device isFlashModeSupported:flashMode] ) {
NSError *error = nil;
if ( [device lockForConfiguration:&error] ) {
device.flashMode = flashMode;
[device unlockForConfiguration];
}
else {
NSLog( @"Could not lock device for configuration: %@", error );
}
}
}
#pragma mark - actions
- (void)snapStillImage:(BOOL)shouldSaveToCameraRoll success:(CaptureBlock)block {
dispatch_async( self.sessionQueue, ^{
AVCaptureConnection *connection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
// Update the orientation on the still image output video connection before capturing.
connection.videoOrientation = self.previewLayer.connection.videoOrientation;
// Flash set to Auto for Still Capture.
// [CKCamera setFlashMode:AVCaptureFlashModeAuto forDevice:self.videoDeviceInput.device];
// Capture a still image.
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:^( CMSampleBufferRef imageDataSampleBuffer, NSError *error ) {
if ( imageDataSampleBuffer ) {
// The sample buffer is not retained. Create image data before saving the still image to the photo library asynchronously.
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
[PHPhotoLibrary requestAuthorization:^( PHAuthorizationStatus status ) {
if ( status == PHAuthorizationStatusAuthorized ) {
NSURL *temporaryFileURL = [self saveToTmpFolder:imageData];
if (shouldSaveToCameraRoll) {
[self saveImageToCameraRoll:imageData temporaryFileURL:temporaryFileURL];
}
if (block) {
block(temporaryFileURL.description);
}
}
}];
}
else {
NSLog( @"Could not capture still image: %@", error );
}
}];
} );
}
-(void)changeCamera:(CallbackBlock)block
{
dispatch_async( self.sessionQueue, ^{
AVCaptureDevice *currentVideoDevice = self.videoDeviceInput.device;
AVCaptureDevicePosition preferredPosition = AVCaptureDevicePositionUnspecified;
AVCaptureDevicePosition currentPosition = currentVideoDevice.position;
switch ( currentPosition )
{
case AVCaptureDevicePositionUnspecified:
case AVCaptureDevicePositionFront:
preferredPosition = AVCaptureDevicePositionBack;
break;
case AVCaptureDevicePositionBack:
preferredPosition = AVCaptureDevicePositionFront;
break;
}
AVCaptureDevice *videoDevice = [CKCamera deviceWithMediaType:AVMediaTypeVideo preferringPosition:preferredPosition];
AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
[self.session beginConfiguration];
// Remove the existing device input first, since using the front and back camera simultaneously is not supported.
[self.session removeInput:self.videoDeviceInput];
if ( [self.session canAddInput:videoDeviceInput] ) {
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentVideoDevice];
[CKCamera setFlashMode:AVCaptureFlashModeAuto forDevice:videoDevice];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:videoDevice];
[self.session addInput:videoDeviceInput];
self.videoDeviceInput = videoDeviceInput;
}
else {
[self.session addInput:self.videoDeviceInput];
}
AVCaptureConnection *connection = [self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if ( connection.isVideoStabilizationSupported ) {
connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
}
[self.session commitConfiguration];
dispatch_async( dispatch_get_main_queue(), ^{
if (block) {
block(YES);
}
} );
} );
}
-(void)saveImageToCameraRoll:(NSData*)imageData temporaryFileURL:(NSURL*)temporaryFileURL{
// To preserve the metadata, we create an asset from the JPEG NSData representation.
// Note that creating an asset from a UIImage discards the metadata.
// In iOS 9, we can use -[PHAssetCreationRequest addResourceWithType:data:options].
// In iOS 8, we save the image to a temporary file and use +[PHAssetChangeRequest creationRequestForAssetFromImageAtFileURL:].
if ( [PHAssetCreationRequest class] ) {
[[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
[[PHAssetCreationRequest creationRequestForAsset] addResourceWithType:PHAssetResourceTypePhoto data:imageData options:nil];
} completionHandler:^( BOOL success, NSError *error ) {
if ( ! success ) {
NSLog( @"Error occurred while saving image to photo library: %@", error );
}
}];
}
else {
[[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
NSError *error = nil;
if ( error ) {
NSLog( @"Error occurred while writing image data to a temporary file: %@", error );
}
else {
[PHAssetChangeRequest creationRequestForAssetFromImageAtFileURL:temporaryFileURL];
}
} completionHandler:^( BOOL success, NSError *error ) {
if ( ! success ) {
NSLog( @"Error occurred while saving image to photo library: %@", error );
}
}];
}
}
-(NSURL*)saveToTmpFolder:(NSData*)data {
NSString *temporaryFileName = [NSProcessInfo processInfo].globallyUniqueString;
NSString *temporaryFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[temporaryFileName stringByAppendingPathExtension:@"jpg"]];
NSURL *temporaryFileURL = [NSURL fileURLWithPath:temporaryFilePath];
NSError *error = nil;
[data writeToURL:temporaryFileURL options:NSDataWritingAtomic error:&error];
if ( error ) {
NSLog( @"Error occurred while writing image data to a temporary file: %@", error );
}
else {
NSLog(@"YOU ROCK!");
}
return temporaryFileURL;
}
#pragma mark - observers
- (void)addObservers
{
[self.session addObserver:self forKeyPath:@"running" options:NSKeyValueObservingOptionNew context:SessionRunningContext];
@@ -225,21 +430,6 @@ typedef NS_ENUM( NSInteger, CKSetupResult ) {
}
+ (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
AVCaptureDevice *captureDevice = devices.firstObject;
for ( AVCaptureDevice *device in devices ) {
if ( device.position == position ) {
captureDevice = device;
break;
}
}
return captureDevice;
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
if ( context == CapturingStillImageContext ) {
@@ -257,12 +447,12 @@ typedef NS_ENUM( NSInteger, CKSetupResult ) {
else if ( context == SessionRunningContext ) {
BOOL isSessionRunning = [change[NSKeyValueChangeNewKey] boolValue];
// dispatch_async( dispatch_get_main_queue(), ^{
// // Only enable the ability to change camera if the device has more than one camera.
// self.cameraButton.enabled = isSessionRunning && ( [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo].count > 1 );
// self.recordButton.enabled = isSessionRunning;
// self.stillButton.enabled = isSessionRunning;
// } );
}
else {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];

View File

@@ -6,9 +6,25 @@
// Copyright © 2016 Wix. All rights reserved.
//
@import AVFoundation;
#import "RCTViewManager.h"
#import "RCTConvert.h"
typedef NS_ENUM(NSInteger, CKCameraFlashMode) {
CKCameraFlashModeAuto,
CKCameraFlashModeOn,
CKCameraFlashModeOff
};
@interface RCTConvert(CKCameraFlashMode)
+ (CKCameraFlashMode)CKCameraFlashMode:(id)json;
@end
@interface CKCameraManager : RCTViewManager
@end

View File

@@ -9,13 +9,69 @@
#import "CKCameraManager.h"
#import "CKCamera.h"
@implementation RCTConvert(CKCameraFlashMode)
RCT_ENUM_CONVERTER(CKCameraFlashMode, (@{
@"auto": @(AVCaptureFlashModeAuto),
@"on": @(AVCaptureFlashModeOn),
@"off": @(AVCaptureFlashModeOff)
}), AVCaptureFlashModeAuto, integerValue)
@end
@interface CKCameraManager ()
@property (nonatomic, strong) CKCamera *camera;
@end
@implementation CKCameraManager
RCT_EXPORT_MODULE()
- (UIView *)view {
return [CKCamera new];
self.camera = [CKCamera new];
return self.camera;
}
RCT_EXPORT_METHOD(capture:(BOOL)shouldSaveToCameraRoll
resolve:(RCTPromiseResolveBlock)resolve
reject:(RCTPromiseRejectBlock)reject) {
[self.camera snapStillImage:shouldSaveToCameraRoll success:^(NSString *imagePath) {
if (imagePath) {
if (resolve) {
resolve(imagePath);
}
}
}];
}
RCT_EXPORT_METHOD(changeCamera:(RCTPromiseResolveBlock)resolve
reject:(RCTPromiseRejectBlock)reject) {
[self.camera changeCamera:^(BOOL success) {
if (success) {
if (resolve) {
resolve([NSNumber numberWithBool:success]);
}
}
}];
}
RCT_EXPORT_METHOD(setFlashMode:(CKCameraFlashMode)flashMode
resolve:(RCTPromiseResolveBlock)resolve
reject:(RCTPromiseRejectBlock)reject) {
[self.camera setFlashMode:flashMode callback:^(BOOL success) {
if (resolve) {
resolve([NSNumber numberWithBool:success]);
}
}];
}
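These RCT_EXPORT_METHODs surface to JavaScript as promise-returning functions on NativeModules.CKCameraManager, with the flash mode passed as one of the strings mapped above ('auto', 'on', 'off'). A rough sketch of calling them directly, assuming a CKCamera view is currently mounted (the CameraKitCamera wrapper in the next file wraps these same calls; demoCameraActions is a hypothetical name):

import {NativeModules} from 'react-native';

const CameraManager = NativeModules.CKCameraManager;

// Hypothetical demo: exercises the three methods this manager exports.
async function demoCameraActions() {
  await CameraManager.setFlashMode('on');                // resolves with a boolean per the native callback
  await CameraManager.changeCamera();                    // flips between front and back camera
  const imagePath = await CameraManager.capture(false);  // temp-file path; true would also save to the camera roll
  console.log(imagePath);
}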

View File

@@ -11,61 +11,103 @@
#import "RCTConvert.h"
@implementation CKGallery : NSObject
//+(void)getAllAlbumsName:(RCTPromiseResolveBlock)resolve
// reject:(RCTPromiseRejectBlock)reject {
//
// NSMutableArray *albumsNames = [[NSMutableArray alloc] init];
//
// PHFetchOptions *userAlbumsOptions = [PHFetchOptions new];
// userAlbumsOptions.predicate = [NSPredicate predicateWithFormat:@"estimatedAssetCount > 0"];
//
// PHFetchResult *userAlbums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeAlbum subtype:PHAssetCollectionSubtypeAny options:userAlbumsOptions];
//
// NSInteger albumsCount = [userAlbums count];
//
// [userAlbums enumerateObjectsUsingBlock:^(PHAssetCollection *collection, NSUInteger idx, BOOL *stop) {
// [albumsNames addObject:collection.localizedTitle];
// if (idx == albumsCount-1) {
// if (resolve) {
// resolve(@{@"albumsNames": albumsNames});
// }
// }
// }];
//
//
//}
@end
@implementation CKGalleryManager
RCT_EXPORT_MODULE();
RCT_EXPORT_METHOD(getAllAlbumsName:(RCTPromiseResolveBlock)resolve
reject:(RCTPromiseRejectBlock)reject)
{
NSMutableArray *albumsNames = [[NSMutableArray alloc] init];
-(PHFetchResult*)getAllAlbums {
PHFetchOptions *userAlbumsOptions = [PHFetchOptions new];
userAlbumsOptions.predicate = [NSPredicate predicateWithFormat:@"estimatedAssetCount > 0"];
PHFetchResult *userAlbums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeAlbum subtype:PHAssetCollectionSubtypeAny options:userAlbumsOptions];
return userAlbums;
}
RCT_EXPORT_METHOD(getAllAlbumsName:(RCTPromiseResolveBlock)resolve
reject:(__unused RCTPromiseRejectBlock)reject)
{
NSMutableArray *albumsInfo = [[NSMutableArray alloc] init];
PHFetchResult *userAlbums = [self getAllAlbums];
NSInteger albumsCount = [userAlbums count];
PHFetchOptions *fetchOptions = [[PHFetchOptions alloc] init];
fetchOptions.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:YES]];
[userAlbums enumerateObjectsUsingBlock:^(PHAssetCollection *collection, NSUInteger idx, BOOL *stop) {
[albumsNames addObject:collection.localizedTitle];
NSMutableDictionary *albumInfoDict = [[NSMutableDictionary alloc] init];
albumInfoDict[@"albumName"] = collection.localizedTitle;
[albumsInfo addObject:albumInfoDict];
PHFetchResult *fetchResult = [PHAsset fetchKeyAssetsInAssetCollection:collection options:fetchOptions];
PHAsset *asset = [fetchResult firstObject];
if (idx == albumsCount-1) {
if (resolve) {
resolve(albumsNames);
resolve(albumsInfo);
}
}
}];
}
RCT_EXPORT_METHOD(getThumbnailForAlbumName:(NSString*)albumName
resolve:(RCTPromiseResolveBlock)resolve
reject:(RCTPromiseRejectBlock)reject)
{
NSInteger retinaScale = [UIScreen mainScreen].scale;
CGSize retinaSquare = CGSizeMake(100*retinaScale, 100*retinaScale);
PHImageRequestOptions *cropToSquare = [[PHImageRequestOptions alloc] init];
cropToSquare.resizeMode = PHImageRequestOptionsResizeModeExact;
PHFetchOptions *fetchOptions = [[PHFetchOptions alloc] init];
fetchOptions.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:YES]];
PHFetchResult *userAlbums = [self getAllAlbums];
[userAlbums enumerateObjectsUsingBlock:^(PHAssetCollection *collection, NSUInteger idx, BOOL *stop) {
if ([albumName isEqualToString:collection.localizedTitle]) {
*stop = YES;
PHFetchResult *fetchResult = [PHAsset fetchKeyAssetsInAssetCollection:collection options:fetchOptions];
PHAsset *asset = [fetchResult firstObject];
CGFloat cropSideLength = MIN(asset.pixelWidth, asset.pixelHeight);
CGRect square = CGRectMake(0, 0, cropSideLength, cropSideLength);
CGRect cropRect = CGRectApplyAffineTransform(square,
CGAffineTransformMakeScale(1.0 / asset.pixelWidth,
1.0 / asset.pixelHeight));
// make sure resolve call only once
__block BOOL isInvokeResolve = NO;
[[PHImageManager defaultManager]
requestImageForAsset:(PHAsset *)asset
targetSize:retinaSquare
contentMode:PHImageContentModeAspectFit
options:cropToSquare
resultHandler:^(UIImage *result, NSDictionary *info) {
NSData *imageData = UIImageJPEGRepresentation(result, 1.0);
NSString *encodedString = [imageData base64Encoding];
if (resolve && !isInvokeResolve) {
isInvokeResolve = YES;
resolve(encodedString);
}
}];
}
}];
}

View File

@@ -1,12 +1,46 @@
import React, {Component} from 'react';
import {
requireNativeComponent,
NativeModules
} from 'react-native';
const NativeCamera = requireNativeComponent('CKCamera', null);
const NativeCameraAction = NativeModules.CKCameraManager;
export default class CameraKitCamera extends React.Component {
render() {
return <NativeCamera {...this.props}/>
}
}
async capture(saveToCameraRoll = true) {
const imageTmpPath = await NativeCameraAction.capture(saveToCameraRoll);
console.log(imageTmpPath);
return imageTmpPath;
}
async changeCamera() {
const success = await NativeCameraAction.changeCamera();
console.log(success);
return success;
}
async setFlashMode(flashMode = 'auto') {
console.log(flashMode);
const success = await NativeCameraAction.setFlashMode(flashMode);
console.log(success);
return success;
}
}
//export async function takePhoto() {
// console.log('#################################');
// console.log(NativeCamera);
// const albumsThumbnail = await NativeCamera.foo();
// //return albumsThumbnail;
//}
//
//export const CameraActions = {
// takePhoto
//}
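A short usage sketch of the wrapper above, mirroring how the example app drives it: keep a ref to the rendered component and call its promise-returning instance methods (CameraScreen is a hypothetical component; the flash strings match the native converter):

import React, {Component} from 'react';
import {View, Text, TouchableOpacity} from 'react-native';
import {CameraKitCamera} from 'react-native-camera-kit';

export default class CameraScreen extends Component {
  async onSnap() {
    // false = keep the shot only as a temp file, skip the camera roll
    const imageURI = await this.camera.capture(false);
    console.log('captured to', imageURI);
  }

  render() {
    return (
      <View style={{flex: 1}}>
        <CameraKitCamera ref={(cam) => { this.camera = cam; }} style={{flex: 1}}/>
        <TouchableOpacity onPress={() => this.onSnap()}>
          <Text>snap</Text>
        </TouchableOpacity>
        <TouchableOpacity onPress={() => this.camera.changeCamera()}>
          <Text>switch camera</Text>
        </TouchableOpacity>
        <TouchableOpacity onPress={() => this.camera.setFlashMode('on')}>
          <Text>flash on</Text>
        </TouchableOpacity>
      </View>
    );
  }
}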

View File

@@ -10,7 +10,13 @@ async function getAlbums() {
return albumsName;
}
export default {
getAlbums
async function getThumbnailForAlbumName(albumName) {
const albumsThumbnail = await CKGallery.getThumbnailForAlbumName(albumName);
return albumsThumbnail;
}
export default {
getAlbums,
getThumbnailForAlbumName
}
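And a matching sketch for the gallery side, combining the two exports the way the example app's onGetAlbumsPressed and onAlbumNamePressed do (loadAlbumsWithThumbnails is a hypothetical helper; field names follow getAllAlbumsName above):

import {CameraKitGallery} from 'react-native-camera-kit';

// Hypothetical helper: fetch every album, then attach a data-URI thumbnail to each.
async function loadAlbumsWithThumbnails() {
  const albums = await CameraKitGallery.getAlbums();   // resolves with [{albumName: '...'}, ...]
  return Promise.all(albums.map(async (album) => {
    const base64Image = await CameraKitGallery.getThumbnailForAlbumName(album.albumName);
    // the native side returns base64 JPEG data; the example app prefixes it as a data URI for <Image>
    return {...album, image: 'data:image/png;base64,' + base64Image};
  }));
}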