Still image capture now works correctly!

Lochlan Wansbrough 2015-04-08 13:51:31 -07:00
parent 20c309156d
commit 75129b54d7
9 changed files with 461 additions and 211 deletions

Camera.ios.js

@@ -12,7 +12,7 @@ var Camera = React.createClass({
var Camera = React.createClass({
propTypes: {
aspect: PropTypes.string,
camera: PropTypes.string,
type: PropTypes.string,
orientation: PropTypes.string,
},
@@ -27,7 +27,7 @@ var Camera = React.createClass({
return {
isAuthorized: false,
aspect: this.props.aspect || 'Fill',
camera: this.props.camera || 'Back',
type: this.props.type || 'Back',
orientation: this.props.orientation || 'Portrait'
};
},
@@ -43,13 +43,13 @@ var Camera = React.createClass({
var style = flattenStyle([styles.base, this.props.style]);
aspect = NativeModules.CameraManager.aspects[this.state.aspect];
camera = NativeModules.CameraManager.cameras[this.state.camera];
type = NativeModules.CameraManager.cameras[this.state.type];
orientation = NativeModules.CameraManager.orientations[this.state.orientation];
var nativeProps = merge(this.props, {
style,
aspect: aspect,
camera: camera,
type: type,
orientation: orientation,
});
@@ -57,7 +57,7 @@ var Camera = React.createClass({
},
switch: function() {
this.state.camera = this.state.camera == 'Back' ? 'Front' : 'Back';
this.state.type = this.state.type == 'Back' ? 'Front' : 'Back';
this.setState(this.state);
},
@@ -69,16 +69,14 @@ var Camera = React.createClass({
var RCTCamera = createReactIOSNativeComponentClass({
validAttributes: merge(ReactIOSViewAttributes.UIView, {
aspect: true,
camera: true,
type: true,
orientation: true
}),
uiViewClassName: 'RCTCamera',
});
var styles = StyleSheet.create({
base: {
overflow: 'hidden'
},
base: { },
});
module.exports = Camera;
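The consumer-facing change in this file is the rename of the `camera` prop to `type`, with `switch()` toggling it between `'Back'` and `'Front'`. A short, hedged sketch of usage after this commit (component and handler names are illustrative, not from the diff):

```js
// Illustrative usage of the renamed prop: `camera` is now `type`,
// and switch() toggles this.state.type between 'Back' and 'Front'.
var React = require('react-native');
var Camera = require('react-native-camera');

var CameraScreen = React.createClass({
  render: function() {
    return (
      <Camera
        ref="cam"
        type="Back"  // before this commit: camera="Back"
        style={{height: 200, width: 200}}
      />
    );
  },
  flip: function() {
    this.refs.cam.switch();  // wire this to e.g. a TouchableHighlight onPress
  }
});
```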

RCTCamera.h

@@ -1,12 +1,23 @@
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import "ViewfinderView.h"
#import "UIView+React.h"
@class RCTCameraManager;
@interface RCTCamera : UIView
@property (nonatomic) RCTCameraManager *cameraManager;
@property (nonatomic) ViewfinderView *viewfinder;
@property (nonatomic) dispatch_queue_t sessionQueue;
@property (nonatomic) AVCaptureSession *session;
@property (nonatomic) AVCaptureDeviceInput *captureDeviceInput;
@property (nonatomic) AVCaptureStillImageOutput *stillImageOutput;
@property (nonatomic) id runtimeErrorHandlingObserver;
@property (nonatomic) NSInteger presetCamera;
- (void)changeCamera:(NSInteger)camera;
- (void)changeOrientation:(NSInteger)orientation;
- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position;
- (void)takePicture:(RCTResponseSenderBlock)callback;
@end

RCTCamera.m

@@ -2,7 +2,9 @@
#import "RCTCamera.h"
#import "RCTCameraManager.h"
#import "RCTLog.h"
#import "RCTUtils.h"
#import "ViewfinderView.h"
#import "UIImage+Resize.h"
#import <AVFoundation/AVFoundation.h>
@implementation RCTCamera
@@ -12,24 +14,77 @@
[(AVCaptureVideoPreviewLayer *)[[self viewfinder] layer] setVideoGravity:aspect];
}
- (void)setCamera:(NSInteger)camera
- (void)setType:(NSInteger)camera
{
[[self cameraManager] setCamera:camera];
if ([[self session] isRunning]) {
[self changeCamera:camera];
}
else {
[self setPresetCamera:camera];
}
}
- (void)setOrientation:(NSInteger)orientation
{
[[self cameraManager] setOrientation:orientation];
[self changeOrientation:orientation];
}
- (id)init
{
if ((self = [super init])) {
[self setCameraManager:[RCTCameraManager sharedManager]];
[self setViewfinder:[[ViewfinderView alloc] init]];
[[self viewfinder] setSession:[[self cameraManager] session]];
[self setSession:[[AVCaptureSession alloc] init]];
[[self session] setSessionPreset:AVCaptureSessionPresetHigh];
[[self viewfinder] setSession:[self session]];
[self addSubview:[self viewfinder]];
[[self session] startRunning];
dispatch_queue_t sessionQueue = dispatch_queue_create("cameraManagerQueue", DISPATCH_QUEUE_SERIAL);
[self setSessionQueue:sessionQueue];
dispatch_async(sessionQueue, ^{
NSError *error = nil;
NSInteger presetCamera = [self presetCamera];
if ([self presetCamera] == AVCaptureDevicePositionUnspecified) {
presetCamera = AVCaptureDevicePositionBack;
}
AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:presetCamera];
AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if (error)
{
NSLog(@"%@", error);
}
if ([[self session] canAddInput:captureDeviceInput])
{
[[self session] addInput:captureDeviceInput];
[self setCaptureDeviceInput:captureDeviceInput];
}
AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
if ([[self session] canAddOutput:stillImageOutput])
{
[stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
[[self session] addOutput:stillImageOutput];
[self setStillImageOutput:stillImageOutput];
}
__weak RCTCamera *weakSelf = self;
[self setRuntimeErrorHandlingObserver:[[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureSessionRuntimeErrorNotification object:[self session] queue:nil usingBlock:^(NSNotification *note) {
RCTCamera *strongSelf = weakSelf;
dispatch_async([strongSelf sessionQueue], ^{
// Manually restarting the session since it must have been stopped due to an error.
[[strongSelf session] startRunning];
});
}]];
});
}
return self;
}
@@ -48,7 +103,7 @@
- (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex
{
[[self viewfinder] insertSubview:view atIndex:atIndex + 1];
[self insertSubview:view atIndex:atIndex + 1];
return;
}
@@ -58,4 +113,142 @@
return;
}
- (void)changeCamera:(NSInteger)camera {
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *currentCaptureDevice = [[self captureDeviceInput] device];
AVCaptureDevicePosition position = (AVCaptureDevicePosition)camera;
AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:(AVCaptureDevicePosition)position];
NSError *error = nil;
AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if (error)
{
NSLog(@"%@", error);
}
[[self session] beginConfiguration];
[[self session] removeInput:[self captureDeviceInput]];
if ([[self session] canAddInput:captureDeviceInput])
{
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentCaptureDevice];
[self setFlashMode:AVCaptureFlashModeAuto forDevice:captureDevice];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
[[self session] addInput:captureDeviceInput];
[self setCaptureDeviceInput:captureDeviceInput];
}
else
{
[[self session] addInput:[self captureDeviceInput]];
}
[[self session] commitConfiguration];
});
}
- (void)changeOrientation:(NSInteger)orientation {
[[(AVCaptureVideoPreviewLayer *)[[self viewfinder] layer] connection] setVideoOrientation:orientation];
}
- (void)takePicture:(RCTResponseSenderBlock)callback {
dispatch_async([self sessionQueue], ^{
// Update the orientation on the still image output video connection before capturing.
[[[self stillImageOutput] connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:[[(AVCaptureVideoPreviewLayer *)[[self viewfinder] layer] connection] videoOrientation]];
// Flash set to Auto for Still Capture
[self setFlashMode:AVCaptureFlashModeAuto forDevice:[[self captureDeviceInput] device]];
// Capture a still image.
[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:[[self stillImageOutput] connectionWithMediaType:AVMediaTypeVideo] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer)
{
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [UIImage imageWithData:imageData];
UIImage *rotatedImage = [image resizedImage:CGSizeMake(image.size.width, image.size.height) interpolationQuality:kCGInterpolationDefault];
NSString *imageBase64 = [UIImageJPEGRepresentation(rotatedImage, 1.0) base64EncodedStringWithOptions:0];
callback(@[[NSNull null], imageBase64]);
}
else {
callback(@[RCTMakeError([error description], nil, nil)]);
}
}];
});
}
- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
AVCaptureDevice *captureDevice = [devices firstObject];
for (AVCaptureDevice *device in devices)
{
if ([device position] == position)
{
captureDevice = device;
break;
}
}
return captureDevice;
}
- (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device
{
if ([device hasFlash] && [device isFlashModeSupported:flashMode])
{
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
[device setFlashMode:flashMode];
[device unlockForConfiguration];
}
else
{
NSLog(@"%@", error);
}
}
}
- (void)subjectAreaDidChange:(NSNotification *)notification
{
CGPoint devicePoint = CGPointMake(.5, .5);
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
}
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *device = [[self captureDeviceInput] device];
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode])
{
[device setFocusMode:focusMode];
[device setFocusPointOfInterest:point];
}
if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode])
{
[device setExposureMode:exposureMode];
[device setExposurePointOfInterest:point];
}
[device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
[device unlockForConfiguration];
}
else
{
NSLog(@"%@", error);
}
});
}
@end

RCTCameraManager.h

@@ -5,16 +5,7 @@
@interface RCTCameraManager : RCTViewManager
@property (nonatomic) dispatch_queue_t sessionQueue;
@property (nonatomic) AVCaptureSession *session;
@property (nonatomic) AVCaptureDeviceInput *captureDeviceInput;
@property (nonatomic) AVCaptureStillImageOutput *stillImageOutput;
@property (nonatomic) RCTCamera *currentCamera;
+ (id)sharedManager;
- (id)init;
- (void)setCamera:(NSInteger) camera;
- (void)setOrientation:(NSInteger) orientation;
- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position;
@end

RCTCameraManager.m

@@ -2,25 +2,11 @@
#import "RCTCamera.h"
#import "RCTBridge.h"
#import "RCTUtils.h"
#import "UIView+React.h"
#import <AVFoundation/AVFoundation.h>
static void * CapturingStillImageContext = &CapturingStillImageContext;
static void * RecordingContext = &RecordingContext;
static void * SessionRunningAndDeviceAuthorizedContext = &SessionRunningAndDeviceAuthorizedContext;
@implementation RCTCameraManager
+ (id)sharedManager {
static RCTCameraManager *sharedCameraManager = nil;
@synchronized(self) {
if (sharedCameraManager == nil)
sharedCameraManager = [[self alloc] init];
}
return sharedCameraManager;
}
@synthesize bridge = _bridge;
- (UIView *)view
{
[self setCurrentCamera:[[RCTCamera alloc] init]];
@@ -28,7 +14,7 @@ static void * SessionRunningAndDeviceAuthorizedContext = &SessionRunningAndDeviceAuthorizedContext;
}
RCT_EXPORT_VIEW_PROPERTY(aspect, NSString);
RCT_EXPORT_VIEW_PROPERTY(camera, NSInteger);
RCT_EXPORT_VIEW_PROPERTY(type, NSInteger);
RCT_EXPORT_VIEW_PROPERTY(orientation, NSInteger);
- (NSDictionary *)constantsToExport
@@ -52,45 +38,6 @@ RCT_EXPORT_VIEW_PROPERTY(orientation, NSInteger);
};
}
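The constants exported here are what let the JS component's render() (shown at the top of this commit) translate its string props into native enum values. A hedged sketch of that lookup; the table contents are elided in this hunk, so the value comments below are inferences from the native setters elsewhere in the diff:

```js
// Sketch of the lookups Camera.ios.js performs against the exported constants.
var CameraManager = require('react-native').NativeModules.CameraManager;

var aspect = CameraManager.aspects['Fill'];               // a video-gravity string (see setAspect:)
var type = CameraManager.cameras['Back'];                 // an AVCaptureDevicePosition integer (see setType:)
var orientation = CameraManager.orientations['Portrait']; // an AVCaptureVideoOrientation integer
```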
- (id)init {
if ((self = [super init])) {
[self setSession:[[AVCaptureSession alloc] init]];
[[self session] setSessionPreset:AVCaptureSessionPresetHigh];
dispatch_queue_t sessionQueue = dispatch_queue_create("cameraManagerQueue", DISPATCH_QUEUE_SERIAL);
[self setSessionQueue:sessionQueue];
dispatch_async(sessionQueue, ^{
NSError *error = nil;
AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];
AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if (error)
{
NSLog(@"%@", error);
}
if ([[self session] canAddInput:captureDeviceInput])
{
[[self session] addInput:captureDeviceInput];
[self setCaptureDeviceInput:captureDeviceInput];
}
AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
if ([[self session] canAddOutput:stillImageOutput])
{
[stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
[[self session] addOutput:stillImageOutput];
[self setStillImageOutput:stillImageOutput];
}
[[self session] startRunning];
});
}
return self;
}
- (void)checkDeviceAuthorizationStatus:(RCTResponseSenderBlock) callback {
RCT_EXPORT();
NSString *mediaType = AVMediaTypeVideo;
@@ -101,140 +48,22 @@ RCT_EXPORT_VIEW_PROPERTY(orientation, NSInteger);
}
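`checkDeviceAuthorizationStatus` is exported to JS, and the component above keeps an `isAuthorized` flag in its state. The callback payload is not shown in this hunk, so the error-first shape below is an assumption:

```js
// Hypothetical sketch: querying camera permission from JS.
// Assumes the callback delivers (error, isAuthorized); the exact payload
// is not visible in this diff.
var CameraManager = require('react-native').NativeModules.CameraManager;

CameraManager.checkDeviceAuthorizationStatus(function(error, isAuthorized) {
  if (error) {
    console.error(error);
    return;
  }
  console.log('camera authorized:', isAuthorized);
});
```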
- (void)setCamera:(NSInteger)camera
- (void)switchCamera:(NSInteger)camera
{
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *currentCaptureDevice = [[self captureDeviceInput] device];
AVCaptureDevicePosition position = (AVCaptureDevicePosition)camera;
AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:(AVCaptureDevicePosition)position];
NSError *error = nil;
AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if (error)
{
NSLog(@"%@", error);
}
[[self session] beginConfiguration];
[[self session] removeInput:[self captureDeviceInput]];
if ([[self session] canAddInput:captureDeviceInput])
{
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentCaptureDevice];
[self setFlashMode:AVCaptureFlashModeAuto forDevice:captureDevice];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
[[self session] addInput:captureDeviceInput];
[self setCaptureDeviceInput:captureDeviceInput];
}
else
{
[[self session] addInput:[self captureDeviceInput]];
}
[[self session] commitConfiguration];
});
RCT_EXPORT();
[_currentCamera changeCamera:camera];
}
- (void)setOrientation:(NSInteger)orientation
{
[[(AVCaptureVideoPreviewLayer *)[[[self currentCamera] viewfinder] layer] connection] setVideoOrientation:orientation];
RCT_EXPORT();
[_currentCamera changeOrientation:orientation];
}
- (void)takePicture:(RCTResponseSenderBlock) callback {
RCT_EXPORT();
dispatch_async([self sessionQueue], ^{
// Update the orientation on the still image output video connection before capturing.
[[[self stillImageOutput] connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:[[(AVCaptureVideoPreviewLayer *)[[[self currentCamera] viewfinder] layer] connection] videoOrientation]];
// Flash set to Auto for Still Capture
[self setFlashMode:AVCaptureFlashModeAuto forDevice:[[self captureDeviceInput] device]];
// Capture a still image.
[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:[[self stillImageOutput] connectionWithMediaType:AVMediaTypeVideo] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer)
{
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
NSString *imageBase64 = [UIImageJPEGRepresentation(image, 1.0) base64EncodedStringWithOptions:0];
callback(@[[NSNull null], imageBase64]);
}
else {
callback(@[RCTMakeError([error description], nil, nil)]);
}
}];
});
[_currentCamera takePicture:callback];
}
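The manager's exported methods now simply delegate to the current RCTCamera instance. From JS, `takePicture` takes an error-first callback whose success value is the base64-encoded JPEG built in RCTCamera.m above (`callback(@[[NSNull null], imageBase64])`). A minimal consumer sketch; the inline data-URI rendering is illustrative:

```js
// Sketch: calling the exported takePicture from JS.
// Success delivers the JPEG as a base64 string (see RCTCamera.m above).
var CameraManager = require('react-native').NativeModules.CameraManager;

CameraManager.takePicture(function(error, base64Data) {
  if (error) {
    console.error(error);
    return;
  }
  // e.g. render inline:
  // <Image source={{uri: 'data:image/jpeg;base64,' + base64Data}} />
});
```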
- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
AVCaptureDevice *captureDevice = [devices firstObject];
for (AVCaptureDevice *device in devices)
{
if ([device position] == position)
{
captureDevice = device;
break;
}
}
return captureDevice;
}
- (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device
{
if ([device hasFlash] && [device isFlashModeSupported:flashMode])
{
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
[device setFlashMode:flashMode];
[device unlockForConfiguration];
}
else
{
NSLog(@"%@", error);
}
}
}
- (void)subjectAreaDidChange:(NSNotification *)notification
{
CGPoint devicePoint = CGPointMake(.5, .5);
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
}
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *device = [[self captureDeviceInput] device];
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode])
{
[device setFocusMode:focusMode];
[device setFocusPointOfInterest:point];
}
if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode])
{
[device setExposureMode:exposureMode];
[device setExposurePointOfInterest:point];
}
[device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
[device unlockForConfiguration];
}
else
{
NSLog(@"%@", error);
}
});
}
@end

README.md

@@ -3,9 +3,8 @@
A camera viewport for React Native. This module is currently in the very early stages of development.
## Known Issues
Because this module is still in early development, there are plenty of issues that need to be sorted out. Some of which I am sure I'm unaware of at this point. Below is a list of known issues. Pull requests are welcome for any of these issues!
Below is a list of known issues. Pull requests are welcome for any of these issues!
- [`takePicture` captures first frame from output buffer instead of current frame](https://github.com/lwansbrough/react-native-camera/issues/10)
- [Camera module may cause app to crash in simulator](https://github.com/lwansbrough/react-native-camera/issues/8)
## Getting started
@@ -42,6 +41,7 @@ var cameraApp = React.createClass({
<Camera
ref="cam"
aspect="Stretch"
type="Front"
orientation="PortraitUpsideDown"
style={{height: 200, width: 200}}
/>
@@ -66,11 +66,11 @@ Values: `Fit`, `Fill` (default), `Stretch`
The `aspect` property allows you to define how your viewfinder renders the camera's view. For instance, if you have a square viewfinder and you want to fill it entirely, you have two options: `Fill`, where the aspect ratio of the camera's view is preserved by cropping the view, or `Stretch`, where the aspect ratio is skewed in order to fit the entire image inside the viewfinder. The other option is `Fit`, which ensures the camera's entire view fits inside your viewfinder without altering the aspect ratio.
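For example (an illustrative snippet, not part of this diff):

```js
// Letterbox the camera's entire view inside the viewfinder instead of cropping:
<Camera aspect="Fit" style={{height: 200, width: 200}} />
```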
#### `camera`
#### `type`
Values: `Front`, `Back` (default)
Use the `camera` property to specify which camera to use.
Use the `type` property to specify which camera to use.
#### `orientation`

UIImage+Resize.h (new file, 21 lines)

@@ -0,0 +1,21 @@
// UIImage+Resize.h
// Created by Trevor Harmon on 8/5/09.
// Free for personal or commercial use, with or without modification.
// No warranty is expressed or implied.
// Extends the UIImage class to support resizing/cropping
#import <UIKit/UIKit.h>
@interface UIImage (Resize)
- (UIImage *)croppedImage:(CGRect)bounds;
- (UIImage *)thumbnailImage:(NSInteger)thumbnailSize
transparentBorder:(NSUInteger)borderSize
cornerRadius:(NSUInteger)cornerRadius
interpolationQuality:(CGInterpolationQuality)quality;
- (UIImage *)resizedImage:(CGSize)newSize
interpolationQuality:(CGInterpolationQuality)quality;
- (UIImage *)resizedImageWithContentMode:(UIViewContentMode)contentMode
bounds:(CGSize)bounds
interpolationQuality:(CGInterpolationQuality)quality;
@end

UIImage+Resize.m (new file, 207 lines)

@@ -0,0 +1,207 @@
// UIImage+Resize.m
// Created by Trevor Harmon on 8/5/09.
// Free for personal or commercial use, with or without modification.
// No warranty is expressed or implied.
#import "UIImage+Resize.h"
// Private helper methods
@interface UIImage ()
- (UIImage *)resizedImage:(CGImageRef)imageRef
size:(CGSize)newSize
transform:(CGAffineTransform)transform
drawTransposed:(BOOL)transpose
interpolationQuality:(CGInterpolationQuality)quality;
- (CGAffineTransform)transformForOrientation:(CGSize)newSize;
@end
@implementation UIImage (Resize)
// Returns a copy of this image that is cropped to the given bounds.
// The bounds will be adjusted using CGRectIntegral.
// JPMH - This method no longer ignores the image's imageOrientation setting.
- (UIImage *)croppedImage:(CGRect)bounds {
CGAffineTransform txTranslate;
CGAffineTransform txCompound;
CGRect adjustedBounds;
BOOL drawTransposed;
switch (self.imageOrientation) {
case UIImageOrientationDown:
case UIImageOrientationDownMirrored:
txTranslate = CGAffineTransformMakeTranslation(self.size.width, self.size.height);
txCompound = CGAffineTransformRotate(txTranslate, M_PI);
adjustedBounds = CGRectApplyAffineTransform(bounds, txCompound);
drawTransposed = NO;
break;
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
txTranslate = CGAffineTransformMakeTranslation(self.size.height, 0.0);
txCompound = CGAffineTransformRotate(txTranslate, M_PI_2);
adjustedBounds = CGRectApplyAffineTransform(bounds, txCompound);
drawTransposed = YES;
break;
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
txTranslate = CGAffineTransformMakeTranslation(0.0, self.size.width);
txCompound = CGAffineTransformRotate(txTranslate, M_PI + M_PI_2);
adjustedBounds = CGRectApplyAffineTransform(bounds, txCompound);
drawTransposed = YES;
break;
default:
adjustedBounds = bounds;
drawTransposed = NO;
}
CGImageRef imageRef = CGImageCreateWithImageInRect([self CGImage], adjustedBounds);
UIImage *croppedImage;
if (CGRectEqualToRect(adjustedBounds, bounds))
croppedImage = [UIImage imageWithCGImage:imageRef];
else
croppedImage = [self resizedImage:imageRef
size:bounds.size
transform:[self transformForOrientation:bounds.size]
drawTransposed:drawTransposed
interpolationQuality:kCGInterpolationHigh];
CGImageRelease(imageRef);
return croppedImage;
}
// Returns a rescaled copy of the image, taking into account its orientation
// The image will be scaled disproportionately if necessary to fit the bounds specified by the parameter
- (UIImage *)resizedImage:(CGSize)newSize interpolationQuality:(CGInterpolationQuality)quality {
BOOL drawTransposed;
switch (self.imageOrientation) {
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
drawTransposed = YES;
break;
default:
drawTransposed = NO;
}
return [self resizedImage:newSize
transform:[self transformForOrientation:newSize]
drawTransposed:drawTransposed
interpolationQuality:quality];
}
// Resizes the image according to the given content mode, taking into account the image's orientation
- (UIImage *)resizedImageWithContentMode:(UIViewContentMode)contentMode
bounds:(CGSize)bounds
interpolationQuality:(CGInterpolationQuality)quality {
CGFloat horizontalRatio = bounds.width / self.size.width;
CGFloat verticalRatio = bounds.height / self.size.height;
CGFloat ratio;
switch (contentMode) {
case UIViewContentModeScaleAspectFill:
ratio = MAX(horizontalRatio, verticalRatio);
break;
case UIViewContentModeScaleAspectFit:
ratio = MIN(horizontalRatio, verticalRatio);
break;
default:
[NSException raise:NSInvalidArgumentException format:@"Unsupported content mode: %d", contentMode];
}
CGSize newSize = CGSizeMake(self.size.width * ratio, self.size.height * ratio);
return [self resizedImage:newSize interpolationQuality:quality];
}
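The aspect-fit/aspect-fill arithmetic generalizes: fill takes the larger of the two scale ratios (both dimensions cover the bounds, with the overflow cropped), while fit takes the smaller (both dimensions stay inside the bounds). A quick JS illustration of the same math, with assumed example dimensions:

```js
// Same ratio math as resizedImageWithContentMode:, in JS for illustration.
// A 400x300 image scaled into 200x200 bounds:
var horizontalRatio = 200 / 400; // 0.5
var verticalRatio = 200 / 300;   // ~0.667
var fillRatio = Math.max(horizontalRatio, verticalRatio); // ~0.667 -> 267x200, cropped
var fitRatio = Math.min(horizontalRatio, verticalRatio);  // 0.5    -> 200x150, letterboxed
```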
#pragma mark -
#pragma mark Private helper methods
// Returns a copy of the image that has been transformed using the given affine transform and scaled to the new size
// The new image's orientation will be UIImageOrientationUp, regardless of the current image's orientation
// If the new size is not integral, it will be rounded up
- (UIImage *)resizedImage:(CGSize)newSize
transform:(CGAffineTransform)transform
drawTransposed:(BOOL)transpose
interpolationQuality:(CGInterpolationQuality)quality {
CGRect newRect = CGRectIntegral(CGRectMake(0, 0, newSize.width, newSize.height));
CGRect transposedRect = CGRectMake(0, 0, newRect.size.height, newRect.size.width);
CGImageRef imageRef = self.CGImage;
// Build a context that's the same dimensions as the new size
CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
if((bitmapInfo == kCGImageAlphaLast) || (bitmapInfo == kCGImageAlphaNone))
bitmapInfo = kCGImageAlphaNoneSkipLast;
CGContextRef bitmap = CGBitmapContextCreate(NULL,
newRect.size.width,
newRect.size.height,
CGImageGetBitsPerComponent(imageRef),
0,
CGImageGetColorSpace(imageRef),
bitmapInfo);
// Rotate and/or flip the image if required by its orientation
CGContextConcatCTM(bitmap, transform);
// Set the quality level to use when rescaling
CGContextSetInterpolationQuality(bitmap, quality);
// Draw into the context; this scales the image
CGContextDrawImage(bitmap, transpose ? transposedRect : newRect, imageRef);
// Get the resized image from the context as a UIImage
CGImageRef newImageRef = CGBitmapContextCreateImage(bitmap);
UIImage *newImage = [UIImage imageWithCGImage:newImageRef];
// Clean up
CGContextRelease(bitmap);
CGImageRelease(newImageRef);
return newImage;
}
// Returns an affine transform that takes into account the image orientation when drawing a scaled image
- (CGAffineTransform)transformForOrientation:(CGSize)newSize {
CGAffineTransform transform = CGAffineTransformIdentity;
switch (self.imageOrientation) {
case UIImageOrientationDown: // EXIF = 3
case UIImageOrientationDownMirrored: // EXIF = 4
transform = CGAffineTransformTranslate(transform, newSize.width, newSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationLeft: // EXIF = 6
case UIImageOrientationLeftMirrored: // EXIF = 5
transform = CGAffineTransformTranslate(transform, newSize.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
break;
case UIImageOrientationRight: // EXIF = 8
case UIImageOrientationRightMirrored: // EXIF = 7
transform = CGAffineTransformTranslate(transform, 0, newSize.height);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
}
switch (self.imageOrientation) {
case UIImageOrientationUpMirrored: // EXIF = 2
case UIImageOrientationDownMirrored: // EXIF = 4
transform = CGAffineTransformTranslate(transform, newSize.width, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationLeftMirrored: // EXIF = 5
case UIImageOrientationRightMirrored: // EXIF = 7
transform = CGAffineTransformTranslate(transform, newSize.height, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
}
return transform;
}
@end

package.json

@@ -2,9 +2,9 @@
"name": "react-native-camera",
"repository": {
"type" : "git",
"url" : "http://github.com/lwansbrough/react-native-camera.git"
"url" : "https://github.com/lwansbrough/react-native-camera.git"
},
"version": "0.0.4",
"version": "0.0.5",
"description": "A Camera element for React Native",
"main": "Camera.ios.js",
"author": "Lochlan Wansbrough <lochie@live.com> (http://lwansbrough.com)",