Updates for React Native 0.3.11. Major breaking changes here. Emphasis on the breaking.

Lochlan Wansbrough 2015-04-10 19:56:30 -07:00
parent b6cac4fa9a
commit 488e5327f7
5 changed files with 208 additions and 229 deletions
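The headline breaking change: AVCaptureSession ownership and the native methods (changeCamera, changeOrientation, takePicture) move from the RCTCamera view into RCTCameraManager, and the old inline `RCT_EXPORT()` calls are replaced with `RCT_EXPORT_METHOD()` wrappers. Below is a minimal sketch of that export pattern, assuming the React Native 0.3.x bridge headers; `ExampleManager` and `ping:` are hypothetical names for illustration and are not part of this commit.

```objc
// Sketch of the RCT_EXPORT_METHOD pattern adopted in this commit.
// "ExampleManager" and "ping:" are illustrative names only.
#import "RCTBridgeModule.h"

@interface ExampleManager : NSObject <RCTBridgeModule>
@end

@implementation ExampleManager

RCT_EXPORT_MODULE(); // exposed to JS under the class name

// Old style (removed in this commit): declare a plain method and call
// RCT_EXPORT() inside its body.
//
// - (void)ping:(RCTResponseSenderBlock)callback
// {
//   RCT_EXPORT();
//   callback(@[[NSNull null], @"pong"]);
// }

// New style: wrap the method signature in RCT_EXPORT_METHOD().
RCT_EXPORT_METHOD(ping:(RCTResponseSenderBlock)callback)
{
  callback(@[[NSNull null], @"pong"]);
}

@end
```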

RCTCamera.h

@@ -1,23 +1,14 @@
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import "ViewfinderView.h"
#import "UIView+React.h"
@class RCTCameraManager;
@interface RCTCamera : UIView
@property (nonatomic) ViewfinderView *viewfinder;
@property (nonatomic) dispatch_queue_t sessionQueue;
@property (nonatomic) AVCaptureSession *session;
@property (nonatomic) AVCaptureDeviceInput *captureDeviceInput;
@property (nonatomic) AVCaptureStillImageOutput *stillImageOutput;
@property (nonatomic) id runtimeErrorHandlingObserver;
@property (nonatomic) NSInteger presetCamera;
@property (nonatomic) RCTCameraManager *manager;
- (void)changeCamera:(NSInteger)camera;
- (void)changeOrientation:(NSInteger)orientation;
- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position;
- (void)takePicture:(RCTResponseSenderBlock)callback;
- (id)initWithManager:(RCTCameraManager*)manager;
@end

RCTCamera.m

@@ -4,101 +4,51 @@
#import "RCTLog.h"
#import "RCTUtils.h"
#import "ViewfinderView.h"
#import "UIImage+Resize.h"
#import <AVFoundation/AVFoundation.h>
@implementation RCTCamera
- (void)setAspect:(NSString *)aspect
{
[(AVCaptureVideoPreviewLayer *)[[self viewfinder] layer] setVideoGravity:aspect];
[(AVCaptureVideoPreviewLayer *)[_viewfinder layer] setVideoGravity:aspect];
}
- (void)setType:(NSInteger)camera
{
if ([[self session] isRunning]) {
[self changeCamera:camera];
if (self.manager.session.isRunning) {
[self.manager changeCamera:camera];
}
else {
[self setPresetCamera:camera];
self.manager.presetCamera = camera;
}
}
- (void)setOrientation:(NSInteger)orientation
{
[self changeOrientation:orientation];
[self.manager changeOrientation:orientation];
}
- (id)init
- (id)initWithManager:(RCTCameraManager*)manager
{
if ((self = [super init])) {
[self setViewfinder:[[ViewfinderView alloc] init]];
[self setSession:[[AVCaptureSession alloc] init]];
[[self session] setSessionPreset:AVCaptureSessionPresetHigh];
[[self viewfinder] setSession:[self session]];
[self addSubview:[self viewfinder]];
[[self session] startRunning];
dispatch_queue_t sessionQueue = dispatch_queue_create("cameraManagerQueue", DISPATCH_QUEUE_SERIAL);
[self setSessionQueue:sessionQueue];
dispatch_async(sessionQueue, ^{
NSError *error = nil;
NSInteger presetCamera = [self presetCamera];
if ([self presetCamera] == AVCaptureDevicePositionUnspecified) {
presetCamera = AVCaptureDevicePositionBack;
}
AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:presetCamera];
AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if (error)
{
NSLog(@"%@", error);
}
if ([[self session] canAddInput:captureDeviceInput])
{
[[self session] addInput:captureDeviceInput];
[self setCaptureDeviceInput:captureDeviceInput];
}
AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
if ([[self session] canAddOutput:stillImageOutput])
{
[stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
[[self session] addOutput:stillImageOutput];
[self setStillImageOutput:stillImageOutput];
}
__weak RCTCamera *weakSelf = self;
[self setRuntimeErrorHandlingObserver:[[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureSessionRuntimeErrorNotification object:[self session] queue:nil usingBlock:^(NSNotification *note) {
RCTCamera *strongSelf = weakSelf;
dispatch_async([strongSelf sessionQueue], ^{
// Manually restarting the session since it must have been stopped due to an error.
[[strongSelf session] startRunning];
});
}]];
});
self.manager = manager;
self.viewfinder = [[ViewfinderView alloc] init];
self.viewfinder.session = self.manager.session;
}
return self;
}
- (NSArray *)reactSubviews
{
NSArray *subviews = @[[self viewfinder]];
NSArray *subviews = @[self.viewfinder];
return subviews;
}
- (void)layoutSubviews
{
[super layoutSubviews];
[[self viewfinder] setFrame:[self bounds]];
[self.viewfinder setFrame:self.bounds];
}
- (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex
@@ -113,142 +63,4 @@
return;
}
- (void)changeCamera:(NSInteger)camera {
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *currentCaptureDevice = [[self captureDeviceInput] device];
AVCaptureDevicePosition position = (AVCaptureDevicePosition)camera;
AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:(AVCaptureDevicePosition)position];
NSError *error = nil;
AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if (error)
{
NSLog(@"%@", error);
}
[[self session] beginConfiguration];
[[self session] removeInput:[self captureDeviceInput]];
if ([[self session] canAddInput:captureDeviceInput])
{
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentCaptureDevice];
[self setFlashMode:AVCaptureFlashModeAuto forDevice:captureDevice];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
[[self session] addInput:captureDeviceInput];
[self setCaptureDeviceInput:captureDeviceInput];
}
else
{
[[self session] addInput:[self captureDeviceInput]];
}
[[self session] commitConfiguration];
});
}
- (void)changeOrientation:(NSInteger)orientation {
[[(AVCaptureVideoPreviewLayer *)[[self viewfinder] layer] connection] setVideoOrientation:orientation];
}
- (void)takePicture:(RCTResponseSenderBlock)callback {
dispatch_async([self sessionQueue], ^{
// Update the orientation on the still image output video connection before capturing.
[[[self stillImageOutput] connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:[[(AVCaptureVideoPreviewLayer *)[[self viewfinder] layer] connection] videoOrientation]];
// Flash set to Auto for Still Capture
[self setFlashMode:AVCaptureFlashModeAuto forDevice:[[self captureDeviceInput] device]];
// Capture a still image.
[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:[[self stillImageOutput] connectionWithMediaType:AVMediaTypeVideo] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer)
{
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [UIImage imageWithData:imageData];
UIImage *rotatedImage = [image resizedImage:CGSizeMake(image.size.width, image.size.height) interpolationQuality:kCGInterpolationDefault];
NSString *imageBase64 = [UIImageJPEGRepresentation(rotatedImage, 1.0) base64EncodedStringWithOptions:0];
callback(@[[NSNull null], imageBase64]);
}
else {
callback(@[RCTMakeError([error description], nil, nil)]);
}
}];
});
}
- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
AVCaptureDevice *captureDevice = [devices firstObject];
for (AVCaptureDevice *device in devices)
{
if ([device position] == position)
{
captureDevice = device;
break;
}
}
return captureDevice;
}
- (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device
{
if ([device hasFlash] && [device isFlashModeSupported:flashMode])
{
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
[device setFlashMode:flashMode];
[device unlockForConfiguration];
}
else
{
NSLog(@"%@", error);
}
}
}
- (void)subjectAreaDidChange:(NSNotification *)notification
{
CGPoint devicePoint = CGPointMake(.5, .5);
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
}
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *device = [[self captureDeviceInput] device];
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode])
{
[device setFocusMode:focusMode];
[device setFocusPointOfInterest:point];
}
if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode])
{
[device setExposureMode:exposureMode];
[device setExposurePointOfInterest:point];
}
[device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
[device unlockForConfiguration];
}
else
{
NSLog(@"%@", error);
}
});
}
@end

RCTCameraManager.h

@@ -5,7 +5,17 @@
@interface RCTCameraManager : RCTViewManager
@property (nonatomic) dispatch_queue_t sessionQueue;
@property (nonatomic) AVCaptureSession *session;
@property (nonatomic) AVCaptureDeviceInput *captureDeviceInput;
@property (nonatomic) AVCaptureStillImageOutput *stillImageOutput;
@property (nonatomic) id runtimeErrorHandlingObserver;
@property (nonatomic) NSInteger presetCamera;
@property (nonatomic) RCTCamera *currentCamera;
- (void)changeCamera:(NSInteger)camera;
- (void)changeOrientation:(NSInteger)orientation;
- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position;
- (void)takePicture:(RCTResponseSenderBlock)callback;
@end

RCTCameraManager.m

@@ -2,15 +2,19 @@
#import "RCTCamera.h"
#import "RCTBridge.h"
#import "RCTUtils.h"
#import "RCTLog.h"
#import "UIView+React.h"
#import "UIImage+Resize.h"
#import <AVFoundation/AVFoundation.h>
@implementation RCTCameraManager
RCT_EXPORT_MODULE();
- (UIView *)view
{
[self setCurrentCamera:[[RCTCamera alloc] init]];
return _currentCamera;
self.currentCamera = [[RCTCamera alloc] initWithManager:self];
return self.currentCamera;
}
RCT_EXPORT_VIEW_PROPERTY(aspect, NSString);
@@ -38,8 +42,60 @@ RCT_EXPORT_VIEW_PROPERTY(orientation, NSInteger);
};
}
- (void)checkDeviceAuthorizationStatus:(RCTResponseSenderBlock) callback {
RCT_EXPORT();
- (id)init {
if ((self = [super init])) {
self.session = [AVCaptureSession new];
self.session.sessionPreset = AVCaptureSessionPresetHigh;
self.sessionQueue = dispatch_queue_create("cameraManagerQueue", DISPATCH_QUEUE_SERIAL);
dispatch_async(self.sessionQueue, ^{
NSError *error = nil;
if (self.presetCamera == AVCaptureDevicePositionUnspecified) {
self.presetCamera = AVCaptureDevicePositionBack;
}
AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:self.presetCamera];
AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if (error)
{
NSLog(@"%@", error);
}
if ([self.session canAddInput:captureDeviceInput])
{
[self.session addInput:captureDeviceInput];
self.captureDeviceInput = captureDeviceInput;
}
AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
if ([self.session canAddOutput:stillImageOutput])
{
stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
[self.session addOutput:stillImageOutput];
self.stillImageOutput = stillImageOutput;
}
__weak RCTCameraManager *weakSelf = self;
[self setRuntimeErrorHandlingObserver:[NSNotificationCenter.defaultCenter addObserverForName:AVCaptureSessionRuntimeErrorNotification object:self.session queue:nil usingBlock:^(NSNotification *note) {
RCTCameraManager *strongSelf = weakSelf;
dispatch_async(strongSelf.sessionQueue, ^{
// Manually restarting the session since it must have been stopped due to an error.
[strongSelf.session startRunning];
});
}]];
[self.session startRunning];
});
}
return self;
}
RCT_EXPORT_METHOD(checkDeviceAuthorizationStatus:(RCTResponseSenderBlock) callback)
{
NSString *mediaType = AVMediaTypeVideo;
[AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
@@ -48,21 +104,131 @@ RCT_EXPORT_VIEW_PROPERTY(orientation, NSInteger);
}
- (void)switchCamera:(NSInteger)camera
{
RCT_EXPORT();
[_currentCamera changeCamera:camera];
RCT_EXPORT_METHOD(changeCamera:(NSInteger)camera) {
AVCaptureDevice *currentCaptureDevice = [self.captureDeviceInput device];
AVCaptureDevicePosition position = (AVCaptureDevicePosition)camera;
AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:(AVCaptureDevicePosition)position];
NSError *error = nil;
AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if (error)
{
NSLog(@"%@", error);
}
[self.session beginConfiguration];
[self.session removeInput:self.captureDeviceInput];
if ([self.session canAddInput:captureDeviceInput])
{
[NSNotificationCenter.defaultCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentCaptureDevice];
// [self setFlashMode:AVCaptureFlashModeAuto forDevice:captureDevice];
[NSNotificationCenter.defaultCenter addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
[self.session addInput:captureDeviceInput];
self.captureDeviceInput = captureDeviceInput;
}
else
{
[self.session addInput:self.captureDeviceInput];
}
[self.session commitConfiguration];
}
- (void)setOrientation:(NSInteger)orientation
{
RCT_EXPORT();
[_currentCamera changeOrientation:orientation];
RCT_EXPORT_METHOD(changeOrientation:(NSInteger)orientation) {
((AVCaptureVideoPreviewLayer *)self.currentCamera.viewfinder.layer).connection.videoOrientation = orientation;
}
- (void)takePicture:(RCTResponseSenderBlock) callback {
RCT_EXPORT();
[_currentCamera takePicture:callback];
RCT_EXPORT_METHOD(takePicture:(RCTResponseSenderBlock)callback) {
[[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:((AVCaptureVideoPreviewLayer *)self.currentCamera.viewfinder.layer).connection.videoOrientation];
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer)
{
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [UIImage imageWithData:imageData];
UIImage *rotatedImage = [image resizedImage:CGSizeMake(image.size.width, image.size.height) interpolationQuality:kCGInterpolationDefault];
NSString *imageBase64 = [UIImageJPEGRepresentation(rotatedImage, 1.0) base64EncodedStringWithOptions:0];
callback(@[[NSNull null], imageBase64]);
}
else {
callback(@[RCTMakeError(error.description, nil, nil)]);
}
}];
}
- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
AVCaptureDevice *captureDevice = [devices firstObject];
for (AVCaptureDevice *device in devices)
{
if ([device position] == position)
{
captureDevice = device;
break;
}
}
return captureDevice;
}
- (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device
{
if (device.hasFlash && [device isFlashModeSupported:flashMode])
{
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
[device setFlashMode:flashMode];
[device unlockForConfiguration];
}
else
{
NSLog(@"%@", error);
}
}
}
- (void)subjectAreaDidChange:(NSNotification *)notification
{
CGPoint devicePoint = CGPointMake(.5, .5);
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
}
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *device = [[self captureDeviceInput] device];
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode])
{
[device setFocusMode:focusMode];
[device setFocusPointOfInterest:point];
}
if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode])
{
[device setExposureMode:exposureMode];
[device setExposurePointOfInterest:point];
}
[device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
[device unlockForConfiguration];
}
else
{
NSLog(@"%@", error);
}
});
}

README.md

@@ -1,11 +1,11 @@
# react-native-camera
A camera viewport for React Native. This module is currently in the very early stages of development.
A camera viewport for React Native. This module is currently in the very early stages of development. **This module is blowing up right now due to recent changes from the core React Native team. Change is good! But it's breaking this module. Be warned.**
## Known Issues
Below is a list of known issues. Pull requests are welcome for any of these issues!
- `takePicture()` crashes on `react-native >= 0.3.5`
- Viewfinder *does not work*. This is a temporary issue that I'm resolving, but right now it will appear as if the module is not working. The camera does in fact work, though.
- [Camera module may cause app to crash in simulator](https://github.com/lwansbrough/react-native-camera/issues/8)
## Getting started