// react-native-camera/ios/RCTCameraManager.m

#import "RCTCameraManager.h"
#import "RCTCamera.h"
#import <React/RCTBridge.h>
#import <React/RCTEventDispatcher.h>
#import <React/RCTUtils.h>
#import <React/RCTLog.h>
#import <React/UIView+React.h>
#import "NSMutableDictionary+ImageMetadata.m"
#import <AssetsLibrary/ALAssetsLibrary.h>
#import <AVFoundation/AVFoundation.h>
#import <ImageIO/ImageIO.h>
#import "RCTSensorOrientationChecker.h"
@interface RCTCameraManager ()
@property (strong, nonatomic) RCTSensorOrientationChecker * sensorOrientationChecker;
@property (assign, nonatomic) NSInteger flashMode;
@end
@implementation RCTCameraManager
RCT_EXPORT_MODULE();
- (UIView *)viewWithProps:(__unused NSDictionary *)props
{
self.presetCamera = ((NSNumber *)props[@"type"]).integerValue;
return [self view];
}
- (UIView *)view
{
self.session = [AVCaptureSession new];
#if !(TARGET_IPHONE_SIMULATOR)
self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
self.previewLayer.needsDisplayOnBoundsChange = YES;
#endif
if(!self.camera){
self.camera = [[RCTCamera alloc] initWithManager:self bridge:self.bridge];
}
return self.camera;
}
- (NSDictionary *)constantsToExport
{
NSMutableDictionary * runtimeBarcodeTypes = [NSMutableDictionary dictionary];
[runtimeBarcodeTypes setDictionary:@{
@"upce": AVMetadataObjectTypeUPCECode,
@"code39": AVMetadataObjectTypeCode39Code,
@"code39mod43": AVMetadataObjectTypeCode39Mod43Code,
@"ean13": AVMetadataObjectTypeEAN13Code,
@"ean8": AVMetadataObjectTypeEAN8Code,
@"code93": AVMetadataObjectTypeCode93Code,
@"code128": AVMetadataObjectTypeCode128Code,
@"pdf417": AVMetadataObjectTypePDF417Code,
@"qr": AVMetadataObjectTypeQRCode,
@"aztec": AVMetadataObjectTypeAztecCode
}];
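
// The symbologies below are weakly linked constants added in later SDKs
// (iOS 8 era), so their addresses are checked before use to avoid
// referencing an unavailable symbol on older systems.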
if (&AVMetadataObjectTypeInterleaved2of5Code != NULL) {
[runtimeBarcodeTypes setObject:AVMetadataObjectTypeInterleaved2of5Code forKey:@"interleaved2of5"];
}
if(&AVMetadataObjectTypeITF14Code != NULL){
[runtimeBarcodeTypes setObject:AVMetadataObjectTypeITF14Code forKey:@"itf14"];
}
if(&AVMetadataObjectTypeDataMatrixCode != NULL){
[runtimeBarcodeTypes setObject:AVMetadataObjectTypeDataMatrixCode forKey:@"datamatrix"];
}
return @{
@"Aspect": @{
@"stretch": @(RCTCameraAspectStretch),
@"fit": @(RCTCameraAspectFit),
@"fill": @(RCTCameraAspectFill)
},
@"BarCodeType": runtimeBarcodeTypes,
@"Type": @{
@"front": @(RCTCameraTypeFront),
@"back": @(RCTCameraTypeBack)
},
@"CaptureMode": @{
@"still": @(RCTCameraCaptureModeStill),
@"video": @(RCTCameraCaptureModeVideo)
},
@"CaptureQuality": @{
@"low": @(RCTCameraCaptureSessionPresetLow),
@"AVCaptureSessionPresetLow": @(RCTCameraCaptureSessionPresetLow),
@"medium": @(RCTCameraCaptureSessionPresetMedium),
@"AVCaptureSessionPresetMedium": @(RCTCameraCaptureSessionPresetMedium),
@"high": @(RCTCameraCaptureSessionPresetHigh),
@"AVCaptureSessionPresetHigh": @(RCTCameraCaptureSessionPresetHigh),
@"photo": @(RCTCameraCaptureSessionPresetPhoto),
@"AVCaptureSessionPresetPhoto": @(RCTCameraCaptureSessionPresetPhoto),
@"480p": @(RCTCameraCaptureSessionPreset480p),
@"AVCaptureSessionPreset640x480": @(RCTCameraCaptureSessionPreset480p),
@"720p": @(RCTCameraCaptureSessionPreset720p),
@"AVCaptureSessionPreset1280x720": @(RCTCameraCaptureSessionPreset720p),
@"1080p": @(RCTCameraCaptureSessionPreset1080p),
@"AVCaptureSessionPreset1920x1080": @(RCTCameraCaptureSessionPreset1080p)
},
@"CaptureTarget": @{
@"memory": @(RCTCameraCaptureTargetMemory),
@"disk": @(RCTCameraCaptureTargetDisk),
@"temp": @(RCTCameraCaptureTargetTemp),
@"cameraRoll": @(RCTCameraCaptureTargetCameraRoll)
},
@"Orientation": @{
@"auto": @(RCTCameraOrientationAuto),
@"landscapeLeft": @(RCTCameraOrientationLandscapeLeft),
@"landscapeRight": @(RCTCameraOrientationLandscapeRight),
@"portrait": @(RCTCameraOrientationPortrait),
@"portraitUpsideDown": @(RCTCameraOrientationPortraitUpsideDown)
},
@"FlashMode": @{
@"off": @(RCTCameraFlashModeOff),
@"on": @(RCTCameraFlashModeOn),
@"auto": @(RCTCameraFlashModeAuto)
},
@"TorchMode": @{
@"off": @(RCTCameraTorchModeOff),
@"on": @(RCTCameraTorchModeOn),
@"auto": @(RCTCameraTorchModeAuto)
}
};
}
RCT_EXPORT_VIEW_PROPERTY(orientation, NSInteger);
RCT_EXPORT_VIEW_PROPERTY(defaultOnFocusComponent, BOOL);
RCT_EXPORT_VIEW_PROPERTY(onFocusChanged, BOOL);
RCT_EXPORT_VIEW_PROPERTY(onZoomChanged, BOOL);
RCT_CUSTOM_VIEW_PROPERTY(captureQuality, NSInteger, RCTCamera) {
NSInteger quality = [RCTConvert NSInteger:json];
NSString *qualityString;
switch (quality) {
default:
case RCTCameraCaptureSessionPresetHigh:
qualityString = AVCaptureSessionPresetHigh;
break;
case RCTCameraCaptureSessionPresetMedium:
qualityString = AVCaptureSessionPresetMedium;
break;
case RCTCameraCaptureSessionPresetLow:
qualityString = AVCaptureSessionPresetLow;
break;
case RCTCameraCaptureSessionPresetPhoto:
qualityString = AVCaptureSessionPresetPhoto;
break;
case RCTCameraCaptureSessionPreset1080p:
qualityString = AVCaptureSessionPreset1920x1080;
break;
case RCTCameraCaptureSessionPreset720p:
qualityString = AVCaptureSessionPreset1280x720;
break;
case RCTCameraCaptureSessionPreset480p:
qualityString = AVCaptureSessionPreset640x480;
break;
}
[self setCaptureQuality:qualityString];
}
RCT_CUSTOM_VIEW_PROPERTY(aspect, NSInteger, RCTCamera) {
NSInteger aspect = [RCTConvert NSInteger:json];
NSString *aspectString;
switch (aspect) {
default:
case RCTCameraAspectFill:
aspectString = AVLayerVideoGravityResizeAspectFill;
break;
case RCTCameraAspectFit:
aspectString = AVLayerVideoGravityResizeAspect;
break;
case RCTCameraAspectStretch:
aspectString = AVLayerVideoGravityResize;
break;
}
self.previewLayer.videoGravity = aspectString;
}
RCT_CUSTOM_VIEW_PROPERTY(type, NSInteger, RCTCamera) {
NSInteger type = [RCTConvert NSInteger:json];
self.presetCamera = type;
if (self.session.isRunning) {
dispatch_async(self.sessionQueue, ^{
AVCaptureDevice *currentCaptureDevice = [self.videoCaptureDeviceInput device];
AVCaptureDevicePosition position = (AVCaptureDevicePosition)type;
AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:(AVCaptureDevicePosition)position];
if (captureDevice == nil) {
return;
}
self.presetCamera = type;
NSError *error = nil;
AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if (error || captureDeviceInput == nil)
{
NSLog(@"%@", error);
return;
}
[self.session beginConfiguration];
[self.session removeInput:self.videoCaptureDeviceInput];
if ([self.session canAddInput:captureDeviceInput])
{
[self.session addInput:captureDeviceInput];
[NSNotificationCenter.defaultCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentCaptureDevice];
[NSNotificationCenter.defaultCenter addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
self.videoCaptureDeviceInput = captureDeviceInput;
[self setFlashMode];
}
else
{
[self.session addInput:self.videoCaptureDeviceInput];
}
[self.session commitConfiguration];
});
}
[self initializeCaptureSessionInput:AVMediaTypeVideo];
}
RCT_CUSTOM_VIEW_PROPERTY(flashMode, NSInteger, RCTCamera) {
self.flashMode = [RCTConvert NSInteger:json];
[self setFlashMode];
}
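
// Applies self.flashMode to the current video capture device. The device is
// locked for configuration before the change; unsupported modes are ignored.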
- (void)setFlashMode {
AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
NSError *error = nil;
if (![device hasFlash]) return;
if (![device lockForConfiguration:&error]) {
NSLog(@"%@", error);
return;
}
if ([device isFlashModeSupported:(AVCaptureFlashMode)self.flashMode]) {
[device setFlashMode:(AVCaptureFlashMode)self.flashMode];
}
[device unlockForConfiguration];
}
RCT_CUSTOM_VIEW_PROPERTY(torchMode, NSInteger, RCTCamera) {
dispatch_async(self.sessionQueue, ^{
NSInteger torchMode = [RCTConvert NSInteger:json];
AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
NSError *error = nil;
if (![device hasTorch]) return;
if (![device lockForConfiguration:&error]) {
NSLog(@"%@", error);
return;
}
[device setTorchMode:(AVCaptureTorchMode)torchMode];
[device unlockForConfiguration];
});
}
RCT_CUSTOM_VIEW_PROPERTY(keepAwake, BOOL, RCTCamera) {
BOOL enabled = [RCTConvert BOOL:json];
[UIApplication sharedApplication].idleTimerDisabled = enabled;
}
RCT_CUSTOM_VIEW_PROPERTY(mirrorImage, BOOL, RCTCamera) {
self.mirrorImage = [RCTConvert BOOL:json];
}
RCT_CUSTOM_VIEW_PROPERTY(barCodeTypes, NSArray, RCTCamera) {
self.barCodeTypes = [RCTConvert NSArray:json];
}
RCT_CUSTOM_VIEW_PROPERTY(captureAudio, BOOL, RCTCamera) {
BOOL captureAudio = [RCTConvert BOOL:json];
if (captureAudio) {
RCTLog(@"capturing audio");
[self initializeCaptureSessionInput:AVMediaTypeAudio];
}
}
- (NSArray *)customDirectEventTypes
{
return @[
@"focusChanged",
@"zoomChanged",
];
}
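
// All AVCaptureSession mutations in this class are funneled through a private
// serial queue (sessionQueue), so session configuration never blocks the main
// thread and never races with itself.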
- (id)init {
if ((self = [super init])) {
self.mirrorImage = false;
self.sessionQueue = dispatch_queue_create("cameraManagerQueue", DISPATCH_QUEUE_SERIAL);
self.sensorOrientationChecker = [RCTSensorOrientationChecker new];
}
return self;
}
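
// Resolves with YES only when both camera and microphone access are granted:
// video permission is requested first, then audio.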
RCT_EXPORT_METHOD(checkDeviceAuthorizationStatus:(RCTPromiseResolveBlock)resolve
reject:(__unused RCTPromiseRejectBlock)reject) {
__block NSString *mediaType = AVMediaTypeVideo;
[AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
if (!granted) {
resolve(@(granted));
}
else {
mediaType = AVMediaTypeAudio;
[AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
resolve(@(granted));
}];
}
}];
}
RCT_EXPORT_METHOD(checkVideoAuthorizationStatus:(RCTPromiseResolveBlock)resolve
reject:(__unused RCTPromiseRejectBlock)reject) {
__block NSString *mediaType = AVMediaTypeVideo;
[AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
resolve(@(granted));
}];
}
RCT_EXPORT_METHOD(checkAudioAuthorizationStatus:(RCTPromiseResolveBlock)resolve
reject:(__unused RCTPromiseRejectBlock)reject) {
__block NSString *mediaType = AVMediaTypeAudio;
[AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
resolve(@(granted));
}];
}
RCT_EXPORT_METHOD(changeOrientation:(NSInteger)orientation) {
[self setOrientation:orientation];
}
RCT_EXPORT_METHOD(capture:(NSDictionary *)options
resolve:(RCTPromiseResolveBlock)resolve
reject:(RCTPromiseRejectBlock)reject) {
NSInteger captureMode = [[options valueForKey:@"mode"] intValue];
NSInteger captureTarget = [[options valueForKey:@"target"] intValue];
if (captureMode == RCTCameraCaptureModeStill) {
[self captureStill:captureTarget options:options resolve:resolve reject:reject];
}
else if (captureMode == RCTCameraCaptureModeVideo) {
[self captureVideo:captureTarget options:options resolve:resolve reject:reject];
}
}
RCT_EXPORT_METHOD(stopCapture) {
if (self.movieFileOutput.recording) {
[self.movieFileOutput stopRecording];
}
}
RCT_EXPORT_METHOD(getFOV:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject) {
NSArray *devices = [AVCaptureDevice devices];
AVCaptureDevice *frontCamera;
AVCaptureDevice *backCamera;
double frontFov = 0.0;
double backFov = 0.0;
for (AVCaptureDevice *device in devices) {
NSLog(@"Device name: %@", [device localizedName]);
if ([device hasMediaType:AVMediaTypeVideo]) {
if ([device position] == AVCaptureDevicePositionBack) {
NSLog(@"Device position : back");
backCamera = device;
backFov = backCamera.activeFormat.videoFieldOfView;
}
else {
NSLog(@"Device position : front");
frontCamera = device;
frontFov = frontCamera.activeFormat.videoFieldOfView;
}
}
}
resolve(@{
[NSNumber numberWithInt:RCTCameraTypeBack]: [NSNumber numberWithDouble: backFov],
[NSNumber numberWithInt:RCTCameraTypeFront]: [NSNumber numberWithDouble: frontFov]
});
}
RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject) {
AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
resolve(@(device.hasFlash));
}
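
// Builds the capture pipeline on the session queue: a JPEG still-image output,
// a movie-file output for video recording, and a metadata output for barcode
// scanning. A runtime-error observer restarts the session if AVFoundation
// stops it unexpectedly.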
- (void)startSession {
#if TARGET_IPHONE_SIMULATOR
return;
#endif
dispatch_async(self.sessionQueue, ^{
if (self.presetCamera == AVCaptureDevicePositionUnspecified) {
self.presetCamera = AVCaptureDevicePositionBack;
}
AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
if ([self.session canAddOutput:stillImageOutput])
{
stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
[self.session addOutput:stillImageOutput];
self.stillImageOutput = stillImageOutput;
}
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([self.session canAddOutput:movieFileOutput])
{
[self.session addOutput:movieFileOutput];
self.movieFileOutput = movieFileOutput;
}
AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
if ([self.session canAddOutput:metadataOutput]) {
[metadataOutput setMetadataObjectsDelegate:self queue:self.sessionQueue];
[self.session addOutput:metadataOutput];
[metadataOutput setMetadataObjectTypes:self.barCodeTypes];
self.metadataOutput = metadataOutput;
}
__weak RCTCameraManager *weakSelf = self;
[self setRuntimeErrorHandlingObserver:[NSNotificationCenter.defaultCenter addObserverForName:AVCaptureSessionRuntimeErrorNotification object:self.session queue:nil usingBlock:^(NSNotification *note) {
RCTCameraManager *strongSelf = weakSelf;
dispatch_async(strongSelf.sessionQueue, ^{
// Manually restarting the session since it must have been stopped due to an error.
[strongSelf.session startRunning];
});
}]];
[self.session startRunning];
});
}
- (void)stopSession {
#if TARGET_IPHONE_SIMULATOR
self.camera = nil;
return;
#endif
dispatch_async(self.sessionQueue, ^{
self.camera = nil;
[self.previewLayer removeFromSuperlayer];
[self.session commitConfiguration];
[self.session stopRunning];
for(AVCaptureInput *input in self.session.inputs) {
[self.session removeInput:input];
}
for(AVCaptureOutput *output in self.session.outputs) {
[self.session removeOutput:output];
}
});
}
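
// Adds (or swaps) a capture input of the given media type. Audio inputs are
// created at most once; a new video input replaces the previous one inside a
// beginConfiguration/commitConfiguration pair.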
- (void)initializeCaptureSessionInput:(NSString *)type {
dispatch_async(self.sessionQueue, ^{
if (type == AVMediaTypeAudio) {
for (AVCaptureDeviceInput* input in [self.session inputs]) {
if ([input.device hasMediaType:AVMediaTypeAudio]) {
// If an audio input has been configured we don't need to set it up again
return;
}
}
}
[self.session beginConfiguration];
NSError *error = nil;
AVCaptureDevice *captureDevice;
if (type == AVMediaTypeAudio) {
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
}
else if (type == AVMediaTypeVideo) {
captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:self.presetCamera];
}
if (captureDevice == nil) {
return;
}
AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if (error || captureDeviceInput == nil) {
NSLog(@"%@", error);
return;
2015-06-12 15:23:47 -07:00
}
if (type == AVMediaTypeVideo) {
[self.session removeInput:self.videoCaptureDeviceInput];
2015-06-12 15:23:47 -07:00
}
if ([self.session canAddInput:captureDeviceInput]) {
[self.session addInput:captureDeviceInput];
if (type == AVMediaTypeAudio) {
self.audioCaptureDeviceInput = captureDeviceInput;
}
else if (type == AVMediaTypeVideo) {
self.videoCaptureDeviceInput = captureDeviceInput;
[self setFlashMode];
}
[self.metadataOutput setMetadataObjectTypes:self.metadataOutput.availableMetadataObjectTypes];
}
[self.session commitConfiguration];
});
}
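
// Entry point for still capture. An explicit orientation in the options wins;
// RCTCameraOrientationAuto defers to the sensor orientation checker (or to the
// preview connection when running in the simulator).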
- (void)captureStill:(NSInteger)target options:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject
{
AVCaptureVideoOrientation orientation = options[@"orientation"] != nil ? [options[@"orientation"] integerValue] : self.orientation;
if (orientation == RCTCameraOrientationAuto) {
#if TARGET_IPHONE_SIMULATOR
[self captureStill:target options:options orientation:self.previewLayer.connection.videoOrientation resolve:resolve reject:reject];
#else
[self.sensorOrientationChecker getDeviceOrientationWithBlock:^(UIInterfaceOrientation orientation) {
[self captureStill:target options:options orientation:[self.sensorOrientationChecker convertToAVCaptureVideoOrientation: orientation] resolve:resolve reject:reject];
}];
#endif
} else {
[self captureStill:target options:options orientation:orientation resolve:resolve reject:reject];
}
}
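
// Simulator builds have no camera, so this path synthesizes a solid-color JPEG
// stamped with the current date. Device builds capture a real frame and rewrite
// its EXIF metadata (rotation, orientation fix, merged user metadata) before
// saving.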
- (void)captureStill:(NSInteger)target options:(NSDictionary *)options orientation:(AVCaptureVideoOrientation)orientation resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject
{
dispatch_async(self.sessionQueue, ^{
#if TARGET_IPHONE_SIMULATOR
CGSize size = CGSizeMake(720, 1280);
UIGraphicsBeginImageContextWithOptions(size, YES, 0);
// Thanks https://gist.github.com/kylefox/1689973
CGFloat hue = ( arc4random() % 256 / 256.0 ); // 0.0 to 1.0
CGFloat saturation = ( arc4random() % 128 / 256.0 ) + 0.5; // 0.5 to 1.0, away from white
CGFloat brightness = ( arc4random() % 128 / 256.0 ) + 0.5; // 0.5 to 1.0, away from black
UIColor *color = [UIColor colorWithHue:hue saturation:saturation brightness:brightness alpha:1];
[color setFill];
UIRectFill(CGRectMake(0, 0, size.width, size.height));
NSDate *currentDate = [NSDate date];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"dd.MM.yy HH:mm:ss"];
NSString *text = [dateFormatter stringFromDate:currentDate];
UIFont *font = [UIFont systemFontOfSize:40.0];
NSDictionary *attributes = [NSDictionary dictionaryWithObjects:
@[font, [UIColor blackColor]]
forKeys:
@[NSFontAttributeName, NSForegroundColorAttributeName]];
[text drawAtPoint:CGPointMake(size.width/3, size.height/2) withAttributes:attributes];
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
NSData *imageData = UIImageJPEGRepresentation(image, 1.0);
[self saveImage:imageData target:target metadata:nil resolve:resolve reject:reject];
#else
[[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:orientation];
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
// Create an image source from the captured JPEG data
CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFDataRef)imageData, NULL);
// Get all the metadata in the image
NSMutableDictionary *imageMetadata = [(NSDictionary *)CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(source, 0, NULL)) mutableCopy];
// create cgimage
CGImageRef cgImage = CGImageSourceCreateImageAtIndex(source, 0, NULL);
// Rotate it
CGImageRef rotatedCGImage;
if ([options objectForKey:@"rotation"]) {
float rotation = [[options objectForKey:@"rotation"] floatValue];
rotatedCGImage = [self newCGImageRotatedByAngle:cgImage angle:rotation];
// The rotated copy owns its own pixels, so the original can be released here
CGImageRelease(cgImage);
} else if ([[options objectForKey:@"fixOrientation"] boolValue] == YES) {
// Get metadata orientation
int metadataOrientation = [[imageMetadata objectForKey:(NSString *)kCGImagePropertyOrientation] intValue];
bool rotated = false;
//see http://www.impulseadventure.com/photo/exif-orientation.html
if (metadataOrientation == 6) {
rotatedCGImage = [self newCGImageRotatedByAngle:cgImage angle:270];
rotated = true;
} else if (metadataOrientation == 3) {
rotatedCGImage = [self newCGImageRotatedByAngle:cgImage angle:180];
rotated = true;
} else {
rotatedCGImage = cgImage;
}
if(rotated) {
[imageMetadata setObject:[NSNumber numberWithInteger:1] forKey:(NSString *)kCGImagePropertyOrientation];
CGImageRelease(cgImage);
}
} else {
rotatedCGImage = cgImage;
}
// Drop the TIFF dictionary so stale orientation/rotation info isn't re-embedded
[imageMetadata removeObjectForKey:(NSString *)kCGImagePropertyTIFFDictionary];
// Merge caller-supplied metadata into the image metadata
[imageMetadata mergeMetadata:[options objectForKey:@"metadata"]];
// Create an image destination backed by in-memory data
NSMutableData *rotatedImageData = [NSMutableData data];
CGImageDestinationRef destination = CGImageDestinationCreateWithData((__bridge CFMutableDataRef)rotatedImageData, CGImageSourceGetType(source), 1, NULL);
CFRelease(source);
// Add the image to the destination, reattaching the updated metadata
CGImageDestinationAddImage(destination, rotatedCGImage, (__bridge CFDictionaryRef)imageMetadata);
// And write
CGImageDestinationFinalize(destination);
CFRelease(destination);
[self saveImage:rotatedImageData target:target metadata:imageMetadata resolve:resolve reject:reject];
CGImageRelease(rotatedCGImage);
}
else {
reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
}
}];
#endif
});
}
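
// Routes captured JPEG data to the requested target: memory resolves with a
// base64 string, disk/temp resolve with a file path, and cameraRoll resolves
// with the asset URL written via ALAssetsLibrary.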
- (void)saveImage:(NSData*)imageData target:(NSInteger)target metadata:(NSDictionary *)metadata resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject {
NSString *responseString;
if (target == RCTCameraCaptureTargetMemory) {
resolve(@{@"data":[imageData base64EncodedStringWithOptions:0]});
return;
}
else if (target == RCTCameraCaptureTargetDisk) {
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths firstObject];
NSFileManager *fileManager = [NSFileManager defaultManager];
NSString *fullPath = [[documentsDirectory stringByAppendingPathComponent:[[NSUUID UUID] UUIDString]] stringByAppendingPathExtension:@"jpg"];
[fileManager createFileAtPath:fullPath contents:imageData attributes:nil];
responseString = fullPath;
}
else if (target == RCTCameraCaptureTargetTemp) {
NSString *fileName = [[NSProcessInfo processInfo] globallyUniqueString];
NSString *fullPath = [NSString stringWithFormat:@"%@%@.jpg", NSTemporaryDirectory(), fileName];
[imageData writeToFile:fullPath atomically:YES];
responseString = fullPath;
}
else if (target == RCTCameraCaptureTargetCameraRoll) {
[[[ALAssetsLibrary alloc] init] writeImageDataToSavedPhotosAlbum:imageData metadata:metadata completionBlock:^(NSURL* url, NSError* error) {
if (error == nil) {
// path isn't really applicable here (this is an asset URI), but kept for backward compatibility
resolve(@{@"path":[url absoluteString], @"mediaUri":[url absoluteString]});
}
else {
reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
}
}];
return;
}
resolve(@{@"path":responseString});
}
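
// Returns a new CGImage rotated by `angle` degrees (mirrored first when
// mirrorImage is set). Follows the Core Foundation "Create" rule, so the
// caller owns the result:
//
//   CGImageRef rotated = [self newCGImageRotatedByAngle:cgImage angle:90];
//   // ... use rotated ...
//   CGImageRelease(rotated);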
- (CGImageRef)newCGImageRotatedByAngle:(CGImageRef)imgRef angle:(CGFloat)angle
{
CGFloat angleInRadians = angle * (M_PI / 180);
CGFloat width = CGImageGetWidth(imgRef);
CGFloat height = CGImageGetHeight(imgRef);
CGRect imgRect = CGRectMake(0, 0, width, height);
CGAffineTransform transform = CGAffineTransformMakeRotation(angleInRadians);
CGRect rotatedRect = CGRectApplyAffineTransform(imgRect, transform);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef bmContext = CGBitmapContextCreate(NULL, rotatedRect.size.width, rotatedRect.size.height, 8, 0, colorSpace, (CGBitmapInfo) kCGImageAlphaPremultipliedFirst);
if (self.mirrorImage) {
CGAffineTransform transform = CGAffineTransformMakeTranslation(rotatedRect.size.width, 0.0);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
CGContextConcatCTM(bmContext, transform);
}
CGContextSetAllowsAntialiasing(bmContext, TRUE);
CGContextSetInterpolationQuality(bmContext, kCGInterpolationNone);
CGColorSpaceRelease(colorSpace);
CGContextTranslateCTM(bmContext, +(rotatedRect.size.width/2), +(rotatedRect.size.height/2));
CGContextRotateCTM(bmContext, angleInRadians);
CGContextTranslateCTM(bmContext, -(rotatedRect.size.width/2), -(rotatedRect.size.height/2));
CGContextDrawImage(bmContext, CGRectMake((rotatedRect.size.width-width)/2.0f, (rotatedRect.size.height-height)/2.0f, width, height), imgRef);
CGImageRef rotatedImage = CGBitmapContextCreateImage(bmContext);
CFRelease(bmContext);
return rotatedImage;
}
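
// Entry point for video capture; resolves the recording orientation the same
// way still capture does before delegating to the overload below.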
-(void)captureVideo:(NSInteger)target options:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject
{
AVCaptureVideoOrientation orientation = options[@"orientation"] != nil ? [options[@"orientation"] integerValue] : self.orientation;
if (orientation == RCTCameraOrientationAuto) {
[self.sensorOrientationChecker getDeviceOrientationWithBlock:^(UIInterfaceOrientation orientation) {
[self captureVideo:target options:options orientation:[self.sensorOrientationChecker convertToAVCaptureVideoOrientation: orientation] resolve:resolve reject:reject];
}];
} else {
[self captureVideo:target options:options orientation:orientation resolve:resolve reject:reject];
}
}
-(void)captureVideo:(NSInteger)target options:(NSDictionary *)options orientation:(AVCaptureVideoOrientation)orientation resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject
{
if (self.movieFileOutput.recording) {
2016-01-28 11:14:00 +00:00
reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(@"Already recording"));
return;
}
if ([[options valueForKey:@"audio"] boolValue]) {
[self initializeCaptureSessionInput:AVMediaTypeAudio];
}
Float64 totalSeconds = [[options valueForKey:@"totalSeconds"] floatValue];
if (totalSeconds > -1) {
int32_t preferredTimeScale = [[options valueForKey:@"preferredTimeScale"] intValue];
CMTime maxDuration = CMTimeMakeWithSeconds(totalSeconds, preferredTimeScale);
self.movieFileOutput.maxRecordedDuration = maxDuration;
}
dispatch_async(self.sessionQueue, ^{
[[self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:orientation];
//Create temporary URL to record to
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath]) {
NSError *error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
return;
}
}
//Start recording
[self.movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
self.videoResolve = resolve;
self.videoReject = reject;
self.videoTarget = target;
});
}
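
// AVCaptureFileOutputRecordingDelegate callback, fired when recording stops.
// Width and height are swapped for portrait recordings because naturalSize is
// reported in the track's pre-transform (landscape) coordinate space, with the
// rotation encoded in preferredTransform.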
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
BOOL recordSuccess = YES;
if ([error code] != noErr) {
// A problem occurred: Find out if the recording was successful.
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value) {
recordSuccess = [value boolValue];
}
}
if (!recordSuccess) {
2016-01-28 11:14:00 +00:00
self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(@"Error while recording"));
return;
}
AVURLAsset* videoAsAsset = [AVURLAsset URLAssetWithURL:outputFileURL options:nil];
AVAssetTrack* videoTrack = [[videoAsAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
float videoWidth;
float videoHeight;
CGSize videoSize = [videoTrack naturalSize];
CGAffineTransform txf = [videoTrack preferredTransform];
if ((txf.tx == videoSize.width && txf.ty == videoSize.height) || (txf.tx == 0 && txf.ty == 0)) {
// Video recorded in landscape orientation
videoWidth = videoSize.width;
videoHeight = videoSize.height;
} else {
// Video recorded in portrait orientation, so have to swap reported width/height
videoWidth = videoSize.height;
videoHeight = videoSize.width;
}
NSMutableDictionary *videoInfo = [NSMutableDictionary dictionaryWithDictionary:@{
@"duration":[NSNumber numberWithFloat:CMTimeGetSeconds(videoAsAsset.duration)],
@"width":[NSNumber numberWithFloat:videoWidth],
@"height":[NSNumber numberWithFloat:videoHeight],
@"size":[NSNumber numberWithLongLong:captureOutput.recordedFileSize],
}];
if (self.videoTarget == RCTCameraCaptureTargetCameraRoll) {
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL]) {
[library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
completionBlock:^(NSURL *assetURL, NSError *error) {
if (error) {
self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
return;
}
[videoInfo setObject:[assetURL absoluteString] forKey:@"path"];
self.videoResolve(videoInfo);
}];
}
}
else if (self.videoTarget == RCTCameraCaptureTargetDisk) {
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths firstObject];
NSString *fullPath = [[documentsDirectory stringByAppendingPathComponent:[[NSUUID UUID] UUIDString]] stringByAppendingPathExtension:@"mov"];
NSFileManager * fileManager = [NSFileManager defaultManager];
NSError * error = nil;
//moving to destination
if (!([fileManager moveItemAtPath:[outputFileURL path] toPath:fullPath error:&error])) {
self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
return;
}
[videoInfo setObject:fullPath forKey:@"path"];
self.videoResolve(videoInfo);
}
else if (self.videoTarget == RCTCameraCaptureTargetTemp) {
NSString *fileName = [[NSProcessInfo processInfo] globallyUniqueString];
NSString *fullPath = [NSString stringWithFormat:@"%@%@.mov", NSTemporaryDirectory(), fileName];
NSFileManager * fileManager = [NSFileManager defaultManager];
NSError * error = nil;
//moving to destination
if (!([fileManager moveItemAtPath:[outputFileURL path] toPath:fullPath error:&error])) {
self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
return;
}
[videoInfo setObject:fullPath forKey:@"path"];
self.videoResolve(videoInfo);
}
else {
self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(@"Target not supported"));
}
}
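
// AVCaptureMetadataOutputObjectsDelegate callback. Matching barcodes are
// forwarded to JS as "CameraBarCodeRead" app events, with their bounds already
// transformed from metadata coordinates into preview-layer (screen) coordinates.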
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
for (AVMetadataMachineReadableCodeObject *metadata in metadataObjects) {
for (id barcodeType in self.barCodeTypes) {
if ([metadata.type isEqualToString:barcodeType]) {
// Transform the meta-data coordinates to screen coords
AVMetadataMachineReadableCodeObject *transformed = (AVMetadataMachineReadableCodeObject *)[_previewLayer transformedMetadataObjectForMetadataObject:metadata];
NSDictionary *event = @{
@"type": metadata.type,
@"data": metadata.stringValue,
@"bounds": @{
@"origin": @{
@"x": [NSString stringWithFormat:@"%f", transformed.bounds.origin.x],
@"y": [NSString stringWithFormat:@"%f", transformed.bounds.origin.y]
},
@"size": @{
@"height": [NSString stringWithFormat:@"%f", transformed.bounds.size.height],
@"width": [NSString stringWithFormat:@"%f", transformed.bounds.size.width],
}
}
};
[self.bridge.eventDispatcher sendAppEventWithName:@"CameraBarCodeRead" body:event];
}
}
}
}
- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
AVCaptureDevice *captureDevice = [devices firstObject];
for (AVCaptureDevice *device in devices)
{
if ([device position] == position)
{
captureDevice = device;
break;
}
}
return captureDevice;
}
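
// When the subject area changes, re-center continuous autofocus and exposure
// on the middle of the frame ((0.5, 0.5) in normalized device coordinates).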
- (void)subjectAreaDidChange:(NSNotification *)notification
{
CGPoint devicePoint = CGPointMake(.5, .5);
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
}
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *device = [[self videoCaptureDeviceInput] device];
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode])
{
[device setFocusMode:focusMode];
[device setFocusPointOfInterest:point];
}
if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode])
{
[device setExposureMode:exposureMode];
[device setExposurePointOfInterest:point];
}
[device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
[device unlockForConfiguration];
}
else
{
NSLog(@"%@", error);
}
});
}
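
// Tap-to-focus: converts a touch point in camera-view coordinates into
// AVFoundation's normalized (0..1) device space before setting the focus and
// exposure points of interest.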
- (void)focusAtThePoint:(CGPoint)atPoint
{
Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice");
if (captureDeviceClass != nil) {
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *device = [[self videoCaptureDeviceInput] device];
if([device isFocusPointOfInterestSupported] &&
[device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
CGRect cameraViewRect = [[self camera] bounds];
double cameraViewWidth = cameraViewRect.size.width;
double cameraViewHeight = cameraViewRect.size.height;
double focus_x = atPoint.x/cameraViewWidth;
double focus_y = atPoint.y/cameraViewHeight;
CGPoint cameraViewPoint = CGPointMake(focus_x, focus_y);
if([device lockForConfiguration:nil]) {
[device setFocusPointOfInterest:cameraViewPoint];
[device setFocusMode:AVCaptureFocusModeAutoFocus];
if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:AVCaptureExposureModeAutoExpose]) {
[device setExposureMode:AVCaptureExposureModeAutoExpose];
[device setExposurePointOfInterest:cameraViewPoint];
}
[device unlockForConfiguration];
}
}
});
}
}
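
// Pinch-to-zoom: atan(velocity / divider) damps large pinch velocities, and the
// resulting factor is clamped to [1.0, videoMaxZoomFactor] before being applied
// and reported back to JS as a "zoomChanged" event.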
- (void)zoom:(CGFloat)velocity reactTag:(NSNumber *)reactTag{
if (isnan(velocity)) {
return;
}
const CGFloat pinchVelocityDividerFactor = 20.0f; // TODO: calibrate or make this component's property
NSError *error = nil;
AVCaptureDevice *device = [[self videoCaptureDeviceInput] device];
if ([device lockForConfiguration:&error]) {
CGFloat zoomFactor = device.videoZoomFactor + atan(velocity / pinchVelocityDividerFactor);
if (zoomFactor > device.activeFormat.videoMaxZoomFactor) {
zoomFactor = device.activeFormat.videoMaxZoomFactor;
} else if (zoomFactor < 1) {
zoomFactor = 1.0f;
}
NSDictionary *event = @{
@"target": reactTag,
@"zoomFactor": [NSNumber numberWithDouble:zoomFactor],
@"velocity": [NSNumber numberWithDouble:velocity]
};
[self.bridge.eventDispatcher sendInputEventWithName:@"zoomChanged" body:event];
device.videoZoomFactor = zoomFactor;
[device unlockForConfiguration];
} else {
NSLog(@"error: %@", error);
}
}
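
// Applies a session preset only if the session supports it, sidestepping the
// exception AVCaptureSession raises for unsupported presets.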
- (void)setCaptureQuality:(NSString *)quality
{
#if !(TARGET_IPHONE_SIMULATOR)
if (quality) {
[self.session beginConfiguration];
if ([self.session canSetSessionPreset:quality]) {
self.session.sessionPreset = quality;
}
[self.session commitConfiguration];
}
#endif
}
@end