Merge branch 'feature/tap-to-focus' of git://github.com/unknownexception/react-native-camera into unknownexception-feature/tap-to-focus

# Conflicts:
#	README.md
#	index.ios.js
This commit is contained in:
Lochlan Wansbrough 2015-09-22 19:41:06 -07:00
commit 1c97947d23
9 changed files with 251 additions and 14 deletions

3
CameraFocusSquare.h Normal file
View File

@ -0,0 +1,3 @@
#import <UIKit/UIKit.h>

/// Visual feedback view for the tap-to-focus gesture: a rounded square whose
/// border pulses (white -> blue) where the user tapped. Created and added as a
/// subview by RCTCamera when a focus tap occurs; all behavior lives in the
/// initializer (see CameraFocusSquare.m).
@interface RCTCameraFocusSquare : UIView
@end

28
CameraFocusSquare.m Normal file
View File

@ -0,0 +1,28 @@
#import "CameraFocusSquare.h"
#import <QuartzCore/QuartzCore.h>
const float squareLength = 80.0f;
@implementation RCTCameraFocusSquare

/// Builds the tap-to-focus indicator: a clear, rounded square with a white
/// border that pulses toward blue a few times after being added to a view.
- (id)initWithFrame:(CGRect)frame
{
    if ((self = [super initWithFrame:frame])) {
        self.backgroundColor = [UIColor clearColor];
        self.layer.borderWidth = 2.0;
        self.layer.cornerRadius = 4.0;
        self.layer.borderColor = [UIColor whiteColor].CGColor;

        // Pulse the border color toward blue; repeats 8 times at the
        // default CABasicAnimation duration (0.25s per repetition).
        CABasicAnimation *pulse =
            [CABasicAnimation animationWithKeyPath:@"borderColor"];
        pulse.toValue = (id)[UIColor blueColor].CGColor;
        pulse.repeatCount = 8;
        [self.layer addAnimation:pulse forKey:@"selectionAnimation"];
    }
    return self;
}

@end

View File

@ -127,7 +127,7 @@ The type of capture that will be performed by the camera - either a still image
#### `captureTarget`
Values: `Camera.constants.CaptureTarget.cameraRoll` (default), `Camera.constants.CaptureTarget.disk`, ~~`Camera.constants.CaptureTarget.memory`~~ (deprecated),
Values: `Camera.constants.CaptureTarget.cameraRoll` (default), `Camera.constants.CaptureTarget.disk`, ~~`Camera.constants.CaptureTarget.memory`~~ (deprecated),
This property allows you to specify the target output of the captured image data. By default the image binary is sent back as a base 64 encoded string. The disk output has been shown to improve capture response time, so that is the recommended value.
@ -186,6 +186,40 @@ Values:
Use the `torchMode` property to specify the camera torch mode.
#### `onFocusChanged`
Args:
```
e: {
nativeEvent: {
touchPoint: { x, y }
}
}
```
Called when a tap-to-focus gesture has been made.
By default, `onFocusChanged` is not defined and tap-to-focus is disabled.
#### `defaultOnFocusComponent`
Values:
`true` (default)
`false`
If `defaultOnFocusComponent` set to false, default internal implementation of visual feedback for tap-to-focus gesture will be disabled.
#### `onZoomChanged`
Args:
```
e: {
nativeEvent: {
velocity, zoomFactor
}
}
```
Will call when zoom has changed.
By default, `onZoomChanged` is not defined and pinch-to-zoom is disabled.
## Component methods
You can access component methods by adding a `ref` (ie. `ref="camera"`) prop to your `<Camera>` element, then you can use `this.refs.camera.capture(cb)`, etc. inside your component.
@ -210,15 +244,6 @@ Ends the current capture session for video captures. Only applies when the curre
## Subviews
This component supports subviews, so if you wish to use the camera view as a background or if you want to layout buttons/images/etc. inside the camera then you can do that.
## Todo
These are some features I think would be important/beneficial to have included with this module. Pull requests welcome!
- [x] Video support
- [x] Flash mode setting
- [x] Automatic orientation adjustment
- [x] Tap to focus
- [ ] Optional facial recognition (w/ ability to style box around face)
------------
Thanks to Brent Vatne (@brentvatne) for the `react-native-video` module which provided me with a great example of how to set up this module.

View File

@ -44,7 +44,10 @@ var Camera = React.createClass({
torchMode: PropTypes.oneOfType([
PropTypes.string,
PropTypes.number
])
]),
defaultOnFocusComponent: PropTypes.bool,
onFocusChanged: PropTypes.func,
onZoomChanged: PropTypes.func
},
setNativeProps(props) {

View File

@ -1,12 +1,15 @@
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import "CameraFocusSquare.h"
@class RCTCameraManager;

/// Camera preview view. Forwards tap-to-focus and pinch-to-zoom gestures to
/// its RCTCameraManager and shows an RCTCameraFocusSquare as visual feedback.
@interface RCTCamera : UIView
// NOTE(review): object properties declared (nonatomic) default to strong under
// ARC. If the manager or bridge also retains this view, that is a retain
// cycle — consider weak for these back-references. TODO confirm ownership.
@property (nonatomic) RCTCameraManager *manager;
@property (nonatomic) RCTBridge *bridge;
// Focus square currently displayed for the tap-to-focus feedback, if any.
@property (nonatomic) RCTCameraFocusSquare *camFocus;
// Legacy initializer (pre-bridge); superseded by initWithManager:bridge:.
- (id)initWithManager:(RCTCameraManager*)manager;
// Designated initializer: the bridge is needed to dispatch focus/zoom events.
- (id)initWithManager:(RCTCameraManager*)manager bridge:(RCTBridge *)bridge;
@end

View File

@ -3,10 +3,20 @@
#import "RCTCameraManager.h"
#import "RCTLog.h"
#import "RCTUtils.h"
#import "RCTEventDispatcher.h"
#import "UIView+React.h"
#import <AVFoundation/AVFoundation.h>
#import "CameraFocusSquare.h"
@implementation RCTCamera
{
BOOL _multipleTouches; // YES while the in-progress gesture has used >1 finger; suppresses tap-to-focus during a pinch
BOOL _onFocusChanged; // tap-to-focus enabled (mirrors the JS onFocusChanged prop being set)
BOOL _defaultOnFocusComponent; // show the built-in focus square on tap (default YES)
BOOL _onZoomChanged; // pinch-to-zoom enabled (mirrors the JS onZoomChanged prop being set)
}
- (void)setAspect:(NSInteger)aspect
{
@ -59,13 +69,41 @@
[self.manager changeTorchMode:torchMode];
}
- (id)initWithManager:(RCTCameraManager*)manager
/// Enables or disables the tap-to-focus gesture (driven by the JS
/// `onFocusChanged` prop). No-op when the value is unchanged.
- (void)setOnFocusChanged:(BOOL)enabled
{
    if (_onFocusChanged == enabled) {
        return;
    }
    _onFocusChanged = enabled;
}
/// Toggles the built-in focus-square feedback shown on a focus tap (driven by
/// the JS `defaultOnFocusComponent` prop). No-op when the value is unchanged.
- (void)setDefaultOnFocusComponent:(BOOL)enabled
{
    if (_defaultOnFocusComponent == enabled) {
        return;
    }
    _defaultOnFocusComponent = enabled;
}
/// Enables or disables the pinch-to-zoom gesture (driven by the JS
/// `onZoomChanged` prop). No-op when the value is unchanged.
- (void)setOnZoomChanged:(BOOL)enabled
{
    if (_onZoomChanged == enabled) {
        return;
    }
    _onZoomChanged = enabled;
}
/// Designated initializer. Wires the view to its manager and the RN bridge,
/// installs the pinch recognizer for optional pinch-to-zoom, and starts the
/// capture session for video input.
- (id)initWithManager:(RCTCameraManager*)manager bridge:(RCTBridge *)bridge
{
    self = [super init];
    if (self) {
        // Gesture state defaults: both gestures are opt-in from JS, the
        // built-in focus square is shown unless explicitly disabled.
        _multipleTouches = NO;
        _onFocusChanged = NO;
        _defaultOnFocusComponent = YES;
        _onZoomChanged = NO;

        self.manager = manager;
        self.bridge = bridge;

        UIPinchGestureRecognizer *pinchRecognizer =
            [[UIPinchGestureRecognizer alloc] initWithTarget:self
                                                      action:@selector(handlePinchToZoomRecognizer:)];
        [self addGestureRecognizer:pinchRecognizer];

        [self.manager initializeCaptureSessionInput:AVMediaTypeVideo];
        [self.manager startSession];
    }
    return self;
}
@ -102,4 +140,70 @@
[self.manager changeOrientation:orientation];
}
/// Tracks whether the current gesture involves more than one finger so that
/// touchesEnded: can avoid treating a pinch as a tap-to-focus.
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    // FIX: forward to super — UIResponder touch overrides should call super
    // unless the responder chain is deliberately cut (it is not here).
    [super touchesBegan:touches withEvent:event];

    // Update the touch state.
    if ([[event touchesForView:self] count] > 1) {
        _multipleTouches = YES;
    }
}
/// Handles the end of a touch: if tap-to-focus is enabled and the gesture was
/// a single-finger tap, focuses the camera at the touched point, dispatches a
/// "focusChanged" event to JS, and (optionally) shows an animated focus square.
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
    // FIX: forward to super, per UIResponder override convention.
    [super touchesEnded:touches withEvent:event];

    if (!_onFocusChanged) return;

    BOOL allTouchesEnded = ([touches count] == [[event touchesForView:self] count]);

    // Do not conflict with zooming and other multi-finger gestures.
    if (allTouchesEnded && !_multipleTouches) {
        UITouch *touch = [[event allTouches] anyObject];
        CGPoint touchPoint = [touch locationInView:touch.view];

        // Focus camera on this point.
        [self.manager focusAtThePoint:touchPoint];

        // Remove any focus square left over from a previous tap.
        if (self.camFocus) {
            [self.camFocus removeFromSuperview];
        }

        // FIX: renamed from `event`, which shadowed the UIEvent parameter.
        NSDictionary *focusEvent = @{
            @"target": self.reactTag,
            @"touchPoint": @{
                @"x": [NSNumber numberWithDouble:touchPoint.x],
                @"y": [NSNumber numberWithDouble:touchPoint.y]
            }
        };
        [self.bridge.eventDispatcher sendInputEventWithName:@"focusChanged" body:focusEvent];

        // Show animated rectangle on the touched area.
        if (_defaultOnFocusComponent) {
            self.camFocus = [[RCTCameraFocusSquare alloc] initWithFrame:CGRectMake(touchPoint.x - 40, touchPoint.y - 40, 80, 80)];
            [self.camFocus setBackgroundColor:[UIColor clearColor]];
            [self addSubview:self.camFocus];
            [self.camFocus setNeedsDisplay];

            // Fade the square out over one second.
            [UIView beginAnimations:nil context:NULL];
            [UIView setAnimationDuration:1.0];
            [self.camFocus setAlpha:0.0];
            [UIView commitAnimations];
        }
    }

    // Reset multi-touch tracking once every finger has lifted.
    if (allTouchesEnded) {
        _multipleTouches = NO;
    }
}
/// Pinch recognizer callback: while the pinch is changing, forwards its
/// velocity to the manager to drive the zoom. No-op unless pinch-to-zoom
/// was enabled via the JS `onZoomChanged` prop.
- (void)handlePinchToZoomRecognizer:(UIPinchGestureRecognizer *)pinchRecognizer
{
    if (_onZoomChanged && pinchRecognizer.state == UIGestureRecognizerStateChanged) {
        [self.manager zoom:pinchRecognizer.velocity reactTag:self.reactTag];
    }
}
@end

View File

@ -7,6 +7,7 @@
objects = {
/* Begin PBXBuildFile section */
0314E39D1B661A460092D183 /* CameraFocusSquare.m in Sources */ = {isa = PBXBuildFile; fileRef = 0314E39C1B661A460092D183 /* CameraFocusSquare.m */; };
4107014D1ACB732B00C6AA39 /* RCTCamera.m in Sources */ = {isa = PBXBuildFile; fileRef = 410701481ACB732B00C6AA39 /* RCTCamera.m */; };
4107014E1ACB732B00C6AA39 /* RCTCameraManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 4107014A1ACB732B00C6AA39 /* RCTCameraManager.m */; };
454EBCF41B5082DC00AD0F86 /* NSMutableDictionary+ImageMetadata.m in Sources */ = {isa = PBXBuildFile; fileRef = 454EBCF31B5082DC00AD0F86 /* NSMutableDictionary+ImageMetadata.m */; };
@ -25,6 +26,8 @@
/* End PBXCopyFilesBuildPhase section */
/* Begin PBXFileReference section */
0314E39B1B661A0C0092D183 /* CameraFocusSquare.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CameraFocusSquare.h; sourceTree = "<group>"; };
0314E39C1B661A460092D183 /* CameraFocusSquare.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraFocusSquare.m; sourceTree = "<group>"; };
4107012F1ACB723B00C6AA39 /* libRCTCamera.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libRCTCamera.a; sourceTree = BUILT_PRODUCTS_DIR; };
410701471ACB732B00C6AA39 /* RCTCamera.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RCTCamera.h; sourceTree = "<group>"; };
410701481ACB732B00C6AA39 /* RCTCamera.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RCTCamera.m; sourceTree = "<group>"; };
@ -47,6 +50,8 @@
410701241ACB719800C6AA39 = {
isa = PBXGroup;
children = (
0314E39C1B661A460092D183 /* CameraFocusSquare.m */,
0314E39B1B661A0C0092D183 /* CameraFocusSquare.h */,
454EBCF31B5082DC00AD0F86 /* NSMutableDictionary+ImageMetadata.m */,
410701471ACB732B00C6AA39 /* RCTCamera.h */,
410701481ACB732B00C6AA39 /* RCTCamera.m */,
@ -119,6 +124,7 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
0314E39D1B661A460092D183 /* CameraFocusSquare.m in Sources */,
454EBCF41B5082DC00AD0F86 /* NSMutableDictionary+ImageMetadata.m in Sources */,
4107014E1ACB732B00C6AA39 /* RCTCameraManager.m in Sources */,
4107014D1ACB732B00C6AA39 /* RCTCamera.m in Sources */,

View File

@ -60,6 +60,7 @@ typedef NS_ENUM(NSInteger, RCTCameraTorchMode) {
@property (nonatomic) NSInteger videoTarget;
@property (nonatomic, strong) RCTResponseSenderBlock videoCallback;
- (void)changeAspect:(NSString *)aspect;
- (void)changeCamera:(NSInteger)camera;
- (void)changeOrientation:(NSInteger)orientation;
@ -71,5 +72,8 @@ typedef NS_ENUM(NSInteger, RCTCameraTorchMode) {
- (void)stopCapture;
- (void)startSession;
- (void)stopSession;
- (void)focusAtThePoint:(CGPoint) atPoint;
- (void)zoom:(CGFloat)velocity reactTag:(NSNumber *)reactTag;
@end

View File

@ -16,7 +16,7 @@ RCT_EXPORT_MODULE();
/// RN view-manager factory. Returns a camera view bound to this manager and
/// the bridge (the bridge is required so the view can dispatch focus/zoom
/// events to JS).
- (UIView *)view
{
    // FIX: merge residue left the old bridge-less call above the new one,
    // making the bridge-aware return unreachable. Keep only the new call.
    return [[RCTCamera alloc] initWithManager:self bridge:self.bridge];
}
RCT_EXPORT_VIEW_PROPERTY(aspect, NSInteger);
@ -93,6 +93,18 @@ RCT_EXPORT_VIEW_PROPERTY(torchMode, NSInteger);
];
}
RCT_EXPORT_VIEW_PROPERTY(defaultOnFocusComponent, BOOL);
RCT_EXPORT_VIEW_PROPERTY(onFocusChanged, BOOL)
RCT_EXPORT_VIEW_PROPERTY(onZoomChanged, BOOL)
/// Direct events this view manager can dispatch to JS
/// (consumed as the onFocusChanged / onZoomChanged props).
- (NSArray *)customDirectEventTypes
{
    return @[@"focusChanged", @"zoomChanged"];
}
- (id)init {
if ((self = [super init])) {
@ -105,6 +117,7 @@ RCT_EXPORT_VIEW_PROPERTY(torchMode, NSInteger);
self.sessionQueue = dispatch_queue_create("cameraManagerQueue", DISPATCH_QUEUE_SERIAL);
}
return self;
}
@ -658,5 +671,53 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
});
}
/// Asynchronously focuses (and auto-exposes) the capture device at the given
/// point. The point is expected in screen coordinates; it is normalized to the
/// device's 0..1 point-of-interest space.
/// NOTE(review): normalization uses the main screen bounds, not this view's
/// bounds — correct only while the preview fills the screen. TODO confirm.
- (void)focusAtThePoint:(CGPoint)atPoint
{
    Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice");
    if (captureDeviceClass != nil) {
        dispatch_async([self sessionQueue], ^{
            AVCaptureDevice *device = [[self videoCaptureDeviceInput] device];
            if ([device isFocusPointOfInterestSupported] &&
                [device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
                // Convert screen coordinates to normalized (0..1) coordinates.
                CGRect screenRect = [[UIScreen mainScreen] bounds];
                double screenWidth = screenRect.size.width;
                double screenHeight = screenRect.size.height;
                double focus_x = atPoint.x / screenWidth;
                double focus_y = atPoint.y / screenHeight;

                // FIX: previously passed nil and silently swallowed lock
                // failures; now the error is captured and logged.
                NSError *error = nil;
                if ([device lockForConfiguration:&error]) {
                    [device setFocusPointOfInterest:CGPointMake(focus_x, focus_y)];
                    [device setFocusMode:AVCaptureFocusModeAutoFocus];
                    if ([device isExposureModeSupported:AVCaptureExposureModeAutoExpose]) {
                        [device setExposureMode:AVCaptureExposureModeAutoExpose];
                    }
                    [device unlockForConfiguration];
                } else {
                    NSLog(@"focusAtThePoint: lockForConfiguration failed: %@", error);
                }
            }
        });
    }
}
/// Applies a pinch-driven zoom step to the capture device and notifies JS via
/// a "zoomChanged" event carrying the resulting zoom factor and the pinch
/// velocity that produced it.
- (void)zoom:(CGFloat)velocity reactTag:(NSNumber *)reactTag
{
    const CGFloat pinchVelocityDividerFactor = 20.0f; // TODO: calibrate or make this component's property
    NSError *error = nil;
    AVCaptureDevice *device = [[self videoCaptureDeviceInput] device];
    if ([device lockForConfiguration:&error]) {
        CGFloat zoomFactor = device.videoZoomFactor + atan(velocity / pinchVelocityDividerFactor);
        // FIX: clamp BEFORE reporting/applying. AVFoundation raises if
        // videoZoomFactor is set outside [1, videoMaxZoomFactor], and the old
        // code could report a factor to JS that was never actually applied.
        CGFloat maxZoom = device.activeFormat.videoMaxZoomFactor;
        zoomFactor = MAX(1.0f, MIN(zoomFactor, maxZoom));

        NSDictionary *event = @{
            @"target": reactTag,
            @"zoomFactor": [NSNumber numberWithDouble:zoomFactor],
            @"velocity": [NSNumber numberWithDouble:velocity]
        };
        [self.bridge.eventDispatcher sendInputEventWithName:@"zoomChanged" body:event];

        device.videoZoomFactor = zoomFactor;
        [device unlockForConfiguration];
    } else {
        NSLog(@"error: %@", error);
    }
}
@end