#import "RCTCameraManager.h"
#import "RCTCamera.h"
#import <React/RCTBridge.h>
#import <React/RCTEventDispatcher.h>
#import <React/RCTUtils.h>
#import <React/RCTLog.h>
#import <React/UIView+React.h>
#import "NSMutableDictionary+ImageMetadata.m"
#import <AssetsLibrary/ALAssetsLibrary.h>
#import <AVFoundation/AVFoundation.h>
#import <ImageIO/ImageIO.h>
#import "RCTSensorOrientationChecker.h"

@interface RCTCameraManager ()

@property (strong, nonatomic) RCTSensorOrientationChecker *sensorOrientationChecker;
@property (assign, nonatomic) NSInteger flashMode;

@end

@implementation RCTCameraManager

RCT_EXPORT_MODULE();

- (UIView *)viewWithProps:(__unused NSDictionary *)props
{
  self.presetCamera = ((NSNumber *)props[@"type"]).integerValue;
  return [self view];
}

- (UIView *)view
{
  self.session = [AVCaptureSession new];
#if !(TARGET_IPHONE_SIMULATOR)
  self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
  self.previewLayer.needsDisplayOnBoundsChange = YES;
#endif

  if (!self.camera) {
    self.camera = [[RCTCamera alloc] initWithManager:self bridge:self.bridge];
  }
  return self.camera;
}

- (NSDictionary *)constantsToExport
{
  NSMutableDictionary *runtimeBarcodeTypes = [NSMutableDictionary dictionary];
  [runtimeBarcodeTypes setDictionary:@{
    @"upce": AVMetadataObjectTypeUPCECode,
    @"code39": AVMetadataObjectTypeCode39Code,
    @"code39mod43": AVMetadataObjectTypeCode39Mod43Code,
    @"ean13": AVMetadataObjectTypeEAN13Code,
    @"ean8": AVMetadataObjectTypeEAN8Code,
    @"code93": AVMetadataObjectTypeCode93Code,
    @"code128": AVMetadataObjectTypeCode128Code,
    @"pdf417": AVMetadataObjectTypePDF417Code,
    @"qr": AVMetadataObjectTypeQRCode,
    @"aztec": AVMetadataObjectTypeAztecCode
  }];

  // These metadata object types are weak-linked on older SDKs, so check the
  // symbols at runtime before exposing them.
  if (&AVMetadataObjectTypeInterleaved2of5Code != NULL) {
    [runtimeBarcodeTypes setObject:AVMetadataObjectTypeInterleaved2of5Code forKey:@"interleaved2of5"];
  }

  if (&AVMetadataObjectTypeITF14Code != NULL) {
    [runtimeBarcodeTypes setObject:AVMetadataObjectTypeITF14Code forKey:@"itf14"];
  }

  if (&AVMetadataObjectTypeDataMatrixCode != NULL) {
    [runtimeBarcodeTypes setObject:AVMetadataObjectTypeDataMatrixCode forKey:@"datamatrix"];
  }

  return @{
    @"Aspect": @{
      @"stretch": @(RCTCameraAspectStretch),
      @"fit": @(RCTCameraAspectFit),
      @"fill": @(RCTCameraAspectFill)
    },
    @"BarCodeType": runtimeBarcodeTypes,
    @"Type": @{
      @"front": @(RCTCameraTypeFront),
      @"back": @(RCTCameraTypeBack)
    },
    @"CaptureMode": @{
      @"still": @(RCTCameraCaptureModeStill),
      @"video": @(RCTCameraCaptureModeVideo)
    },
    @"CaptureQuality": @{
      @"low": @(RCTCameraCaptureSessionPresetLow),
      @"AVCaptureSessionPresetLow": @(RCTCameraCaptureSessionPresetLow),
      @"medium": @(RCTCameraCaptureSessionPresetMedium),
      @"AVCaptureSessionPresetMedium": @(RCTCameraCaptureSessionPresetMedium),
      @"high": @(RCTCameraCaptureSessionPresetHigh),
      @"AVCaptureSessionPresetHigh": @(RCTCameraCaptureSessionPresetHigh),
      @"photo": @(RCTCameraCaptureSessionPresetPhoto),
      @"AVCaptureSessionPresetPhoto": @(RCTCameraCaptureSessionPresetPhoto),
      @"480p": @(RCTCameraCaptureSessionPreset480p),
      @"AVCaptureSessionPreset640x480": @(RCTCameraCaptureSessionPreset480p),
      @"720p": @(RCTCameraCaptureSessionPreset720p),
      @"AVCaptureSessionPreset1280x720": @(RCTCameraCaptureSessionPreset720p),
      @"1080p": @(RCTCameraCaptureSessionPreset1080p),
      @"AVCaptureSessionPreset1920x1080": @(RCTCameraCaptureSessionPreset1080p)
    },
    @"CaptureTarget": @{
      @"memory": @(RCTCameraCaptureTargetMemory),
      @"disk": @(RCTCameraCaptureTargetDisk),
      @"temp": @(RCTCameraCaptureTargetTemp),
      @"cameraRoll": @(RCTCameraCaptureTargetCameraRoll)
    },
    @"Orientation": @{
      @"auto": @(RCTCameraOrientationAuto),
      @"landscapeLeft": @(RCTCameraOrientationLandscapeLeft),
      @"landscapeRight": @(RCTCameraOrientationLandscapeRight),
      @"portrait": @(RCTCameraOrientationPortrait),
      @"portraitUpsideDown": @(RCTCameraOrientationPortraitUpsideDown)
    },
    @"FlashMode": @{
      @"off": @(RCTCameraFlashModeOff),
      @"on": @(RCTCameraFlashModeOn),
      @"auto": @(RCTCameraFlashModeAuto)
    },
    @"TorchMode": @{
      @"off": @(RCTCameraTorchModeOff),
      @"on": @(RCTCameraTorchModeOn),
      @"auto": @(RCTCameraTorchModeAuto)
    }
  };
}

RCT_EXPORT_VIEW_PROPERTY(orientation, NSInteger);
RCT_EXPORT_VIEW_PROPERTY(defaultOnFocusComponent, BOOL);
RCT_EXPORT_VIEW_PROPERTY(onFocusChanged, BOOL);
RCT_EXPORT_VIEW_PROPERTY(onZoomChanged, BOOL);

RCT_CUSTOM_VIEW_PROPERTY(captureQuality, NSInteger, RCTCamera) {
  NSInteger quality = [RCTConvert NSInteger:json];
  NSString *qualityString;
  switch (quality) {
    default:
    case RCTCameraCaptureSessionPresetHigh:
      qualityString = AVCaptureSessionPresetHigh;
      break;
    case RCTCameraCaptureSessionPresetMedium:
      qualityString = AVCaptureSessionPresetMedium;
      break;
    case RCTCameraCaptureSessionPresetLow:
      qualityString = AVCaptureSessionPresetLow;
      break;
    case RCTCameraCaptureSessionPresetPhoto:
      qualityString = AVCaptureSessionPresetPhoto;
      break;
    case RCTCameraCaptureSessionPreset1080p:
      qualityString = AVCaptureSessionPreset1920x1080;
      break;
    case RCTCameraCaptureSessionPreset720p:
      qualityString = AVCaptureSessionPreset1280x720;
      break;
    case RCTCameraCaptureSessionPreset480p:
      qualityString = AVCaptureSessionPreset640x480;
      break;
  }

  [self setCaptureQuality:qualityString];
}

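// The switch above maps the numeric CaptureQuality constants exported by
// constantsToExport onto AVCaptureSession preset strings, which are then
// applied by -setCaptureQuality: near the bottom of this file. A minimal
// sketch of the round trip (values as exported above):
//
//   // JS sets this view property to RCTCameraCaptureSessionPreset720p ...
//   // ... which resolves to AVCaptureSessionPreset1280x720 and ends up as:
//   [self setCaptureQuality:AVCaptureSessionPreset1280x720];
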
RCT_CUSTOM_VIEW_PROPERTY(aspect, NSInteger, RCTCamera) {
  NSInteger aspect = [RCTConvert NSInteger:json];
  NSString *aspectString;
  switch (aspect) {
    default:
    case RCTCameraAspectFill:
      aspectString = AVLayerVideoGravityResizeAspectFill;
      break;
    case RCTCameraAspectFit:
      aspectString = AVLayerVideoGravityResizeAspect;
      break;
    case RCTCameraAspectStretch:
      aspectString = AVLayerVideoGravityResize;
      break;
  }

  self.previewLayer.videoGravity = aspectString;
}

RCT_CUSTOM_VIEW_PROPERTY(type, NSInteger, RCTCamera) {
  NSInteger type = [RCTConvert NSInteger:json];

  self.presetCamera = type;
  if (self.session.isRunning) {
    dispatch_async(self.sessionQueue, ^{
      AVCaptureDevice *currentCaptureDevice = [self.videoCaptureDeviceInput device];
      AVCaptureDevicePosition position = (AVCaptureDevicePosition)type;
      AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:(AVCaptureDevicePosition)position];

      if (captureDevice == nil) {
        return;
      }

      self.presetCamera = type;

      NSError *error = nil;
      AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];

      if (error || captureDeviceInput == nil)
      {
        NSLog(@"%@", error);
        return;
      }

      [self.session beginConfiguration];

      [self.session removeInput:self.videoCaptureDeviceInput];

      if ([self.session canAddInput:captureDeviceInput])
      {
        [self.session addInput:captureDeviceInput];

        [NSNotificationCenter.defaultCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentCaptureDevice];

        [NSNotificationCenter.defaultCenter addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
        self.videoCaptureDeviceInput = captureDeviceInput;
        [self setFlashMode];
      }
      else
      {
        [self.session addInput:self.videoCaptureDeviceInput];
      }

      [self.session commitConfiguration];
    });
  }
  [self initializeCaptureSessionInput:AVMediaTypeVideo];
}

RCT_CUSTOM_VIEW_PROPERTY(flashMode, NSInteger, RCTCamera) {
  self.flashMode = [RCTConvert NSInteger:json];
  [self setFlashMode];
}

- (void)setFlashMode {
  AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
  NSError *error = nil;

  if (![device hasFlash]) return;
  if (![device lockForConfiguration:&error]) {
    NSLog(@"%@", error);
    return;
  }
  if (device.hasFlash && [device isFlashModeSupported:self.flashMode])
  {
    NSError *error = nil;
    if ([device lockForConfiguration:&error])
    {
      [device setFlashMode:self.flashMode];
      [device unlockForConfiguration];
    }
    else
    {
      NSLog(@"%@", error);
    }
  }
  [device unlockForConfiguration];
}

RCT_CUSTOM_VIEW_PROPERTY(torchMode, NSInteger, RCTCamera) {
  dispatch_async(self.sessionQueue, ^{
    NSInteger torchMode = [RCTConvert NSInteger:json];
    AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
    NSError *error = nil;

    if (![device hasTorch]) return;
    if (![device lockForConfiguration:&error]) {
      NSLog(@"%@", error);
      return;
    }
    [device setTorchMode:torchMode];
    [device unlockForConfiguration];
  });
}

RCT_CUSTOM_VIEW_PROPERTY(keepAwake, BOOL, RCTCamera) {
  BOOL enabled = [RCTConvert BOOL:json];
  [UIApplication sharedApplication].idleTimerDisabled = enabled;
}

RCT_CUSTOM_VIEW_PROPERTY(mirrorImage, BOOL, RCTCamera) {
  self.mirrorImage = [RCTConvert BOOL:json];
}

RCT_CUSTOM_VIEW_PROPERTY(barCodeTypes, NSArray, RCTCamera) {
  self.barCodeTypes = [RCTConvert NSArray:json];
}

RCT_CUSTOM_VIEW_PROPERTY(captureAudio, BOOL, RCTCamera) {
  BOOL captureAudio = [RCTConvert BOOL:json];
  if (captureAudio) {
    RCTLog(@"capturing audio");
    [self initializeCaptureSessionInput:AVMediaTypeAudio];
  }
}

- (NSArray *)customDirectEventTypes
{
  return @[
    @"focusChanged",
    @"zoomChanged",
  ];
}

- (id)init {
  if ((self = [super init])) {
    self.mirrorImage = false;

    self.sessionQueue = dispatch_queue_create("cameraManagerQueue", DISPATCH_QUEUE_SERIAL);

    self.sensorOrientationChecker = [RCTSensorOrientationChecker new];
  }
  return self;
}

RCT_EXPORT_METHOD(checkDeviceAuthorizationStatus:(RCTPromiseResolveBlock)resolve
                  reject:(__unused RCTPromiseRejectBlock)reject) {
  __block NSString *mediaType = AVMediaTypeVideo;

  [AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
    if (!granted) {
      resolve(@(granted));
    }
    else {
      mediaType = AVMediaTypeAudio;
      [AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
        resolve(@(granted));
      }];
    }
  }];
}

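// checkDeviceAuthorizationStatus resolves with a single BOOL covering both
// capture devices: it requests camera access first and, only if that is
// granted, requests microphone access, so the promise is fulfilled with NO as
// soon as either permission is refused. The two methods below expose the same
// check for each media type individually.
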
RCT_EXPORT_METHOD(checkVideoAuthorizationStatus:(RCTPromiseResolveBlock)resolve
                  reject:(__unused RCTPromiseRejectBlock)reject) {
  __block NSString *mediaType = AVMediaTypeVideo;

  [AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
    resolve(@(granted));
  }];
}

RCT_EXPORT_METHOD(checkAudioAuthorizationStatus:(RCTPromiseResolveBlock)resolve
                  reject:(__unused RCTPromiseRejectBlock)reject) {
  __block NSString *mediaType = AVMediaTypeAudio;

  [AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
    resolve(@(granted));
  }];
}

RCT_EXPORT_METHOD(changeOrientation:(NSInteger)orientation) {
  [self setOrientation:orientation];
}

RCT_EXPORT_METHOD(capture:(NSDictionary *)options
                  resolve:(RCTPromiseResolveBlock)resolve
                  reject:(RCTPromiseRejectBlock)reject) {
  NSInteger captureMode = [[options valueForKey:@"mode"] intValue];
  NSInteger captureTarget = [[options valueForKey:@"target"] intValue];

  if (captureMode == RCTCameraCaptureModeStill) {
    [self captureStill:captureTarget options:options resolve:resolve reject:reject];
  }
  else if (captureMode == RCTCameraCaptureModeVideo) {
    [self captureVideo:captureTarget options:options resolve:resolve reject:reject];
  }
}

RCT_EXPORT_METHOD(stopCapture) {
  if (self.movieFileOutput.recording) {
    [self.movieFileOutput stopRecording];
  }
}

RCT_EXPORT_METHOD(getFOV:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject) {
  NSArray *devices = [AVCaptureDevice devices];
  AVCaptureDevice *frontCamera;
  AVCaptureDevice *backCamera;
  double frontFov = 0.0;
  double backFov = 0.0;

  for (AVCaptureDevice *device in devices) {
    NSLog(@"Device name: %@", [device localizedName]);

    if ([device hasMediaType:AVMediaTypeVideo]) {
      if ([device position] == AVCaptureDevicePositionBack) {
        NSLog(@"Device position : back");
        backCamera = device;
        backFov = backCamera.activeFormat.videoFieldOfView;
      }
      else {
        NSLog(@"Device position : front");
        frontCamera = device;
        frontFov = frontCamera.activeFormat.videoFieldOfView;
      }
    }
  }

  resolve(@{
    [NSNumber numberWithInt:RCTCameraTypeBack]: [NSNumber numberWithDouble:backFov],
    [NSNumber numberWithInt:RCTCameraTypeFront]: [NSNumber numberWithDouble:frontFov]
  });
}

RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject) {
  AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
  resolve(@(device.hasFlash));
}

- (void)startSession {
#if TARGET_IPHONE_SIMULATOR
  return;
#endif
  dispatch_async(self.sessionQueue, ^{
    if (self.presetCamera == AVCaptureDevicePositionUnspecified) {
      self.presetCamera = AVCaptureDevicePositionBack;
    }

    AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    if ([self.session canAddOutput:stillImageOutput])
    {
      stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
      [self.session addOutput:stillImageOutput];
      self.stillImageOutput = stillImageOutput;
    }

    AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    if ([self.session canAddOutput:movieFileOutput])
    {
      [self.session addOutput:movieFileOutput];
      self.movieFileOutput = movieFileOutput;
    }

    AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
    if ([self.session canAddOutput:metadataOutput]) {
      [metadataOutput setMetadataObjectsDelegate:self queue:self.sessionQueue];
      [self.session addOutput:metadataOutput];
      [metadataOutput setMetadataObjectTypes:self.barCodeTypes];
      self.metadataOutput = metadataOutput;
    }

    __weak RCTCameraManager *weakSelf = self;
    [self setRuntimeErrorHandlingObserver:[NSNotificationCenter.defaultCenter addObserverForName:AVCaptureSessionRuntimeErrorNotification object:self.session queue:nil usingBlock:^(NSNotification *note) {
      RCTCameraManager *strongSelf = weakSelf;
      dispatch_async(strongSelf.sessionQueue, ^{
        // Manually restarting the session since it must have been stopped due to an error.
        [strongSelf.session startRunning];
      });
    }]];

    [self.session startRunning];
  });
}

- (void)stopSession {
#if TARGET_IPHONE_SIMULATOR
  self.camera = nil;
  return;
#endif
  dispatch_async(self.sessionQueue, ^{
    self.camera = nil;
    [self.previewLayer removeFromSuperlayer];
    [self.session commitConfiguration];
    [self.session stopRunning];
    for (AVCaptureInput *input in self.session.inputs) {
      [self.session removeInput:input];
    }

    for (AVCaptureOutput *output in self.session.outputs) {
      [self.session removeOutput:output];
    }
  });
}

- (void)initializeCaptureSessionInput:(NSString *)type {
  dispatch_async(self.sessionQueue, ^{
    if (type == AVMediaTypeAudio) {
      for (AVCaptureDeviceInput *input in [self.session inputs]) {
        if ([input.device hasMediaType:AVMediaTypeAudio]) {
          // If an audio input has been configured we don't need to set it up again
          return;
        }
      }
    }

    [self.session beginConfiguration];

    NSError *error = nil;
    AVCaptureDevice *captureDevice;

    if (type == AVMediaTypeAudio) {
      captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    }
    else if (type == AVMediaTypeVideo) {
      captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:self.presetCamera];
    }

    if (captureDevice == nil) {
      return;
    }

    AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];

    if (error || captureDeviceInput == nil) {
      NSLog(@"%@", error);
      return;
    }

    if (type == AVMediaTypeVideo) {
      [self.session removeInput:self.videoCaptureDeviceInput];
    }

    if ([self.session canAddInput:captureDeviceInput]) {
      [self.session addInput:captureDeviceInput];

      if (type == AVMediaTypeAudio) {
        self.audioCaptureDeviceInput = captureDeviceInput;
      }
      else if (type == AVMediaTypeVideo) {
        self.videoCaptureDeviceInput = captureDeviceInput;
        [self setFlashMode];
      }
      [self.metadataOutput setMetadataObjectTypes:self.metadataOutput.availableMetadataObjectTypes];
    }

    [self.session commitConfiguration];
  });
}

- (void)captureStill:(NSInteger)target options:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject
{
  AVCaptureVideoOrientation orientation = options[@"orientation"] != nil ? [options[@"orientation"] integerValue] : self.orientation;
  if (orientation == RCTCameraOrientationAuto) {
#if TARGET_IPHONE_SIMULATOR
    [self captureStill:target options:options orientation:self.previewLayer.connection.videoOrientation resolve:resolve reject:reject];
#else
    [self.sensorOrientationChecker getDeviceOrientationWithBlock:^(UIInterfaceOrientation orientation) {
      [self captureStill:target options:options orientation:[self.sensorOrientationChecker convertToAVCaptureVideoOrientation:orientation] resolve:resolve reject:reject];
    }];
#endif
  } else {
    [self captureStill:target options:options orientation:orientation resolve:resolve reject:reject];
  }
}

- (void)captureStill:(NSInteger)target options:(NSDictionary *)options orientation:(AVCaptureVideoOrientation)orientation resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject
{
  dispatch_async(self.sessionQueue, ^{
#if TARGET_IPHONE_SIMULATOR
    // The simulator has no camera, so generate a random-colored test card
    // stamped with the current date instead.
    CGSize size = CGSizeMake(720, 1280);
    UIGraphicsBeginImageContextWithOptions(size, YES, 0);
    // Thanks https://gist.github.com/kylefox/1689973
    CGFloat hue = ( arc4random() % 256 / 256.0 );  // 0.0 to 1.0
    CGFloat saturation = ( arc4random() % 128 / 256.0 ) + 0.5;  // 0.5 to 1.0, away from white
    CGFloat brightness = ( arc4random() % 128 / 256.0 ) + 0.5;  // 0.5 to 1.0, away from black
    UIColor *color = [UIColor colorWithHue:hue saturation:saturation brightness:brightness alpha:1];
    [color setFill];
    UIRectFill(CGRectMake(0, 0, size.width, size.height));
    NSDate *currentDate = [NSDate date];
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    [dateFormatter setDateFormat:@"dd.MM.YY HH:mm:ss"];
    NSString *text = [dateFormatter stringFromDate:currentDate];
    UIFont *font = [UIFont systemFontOfSize:40.0];
    NSDictionary *attributes = [NSDictionary dictionaryWithObjects:@[font, [UIColor blackColor]]
                                                            forKeys:@[NSFontAttributeName, NSForegroundColorAttributeName]];
    [text drawAtPoint:CGPointMake(size.width/3, size.height/2) withAttributes:attributes];
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    NSData *imageData = UIImageJPEGRepresentation(image, 1.0);
    [self saveImage:imageData target:target metadata:nil resolve:resolve reject:reject];
#else
    [[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:orientation];

    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {

      if (imageDataSampleBuffer) {
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];

        // Create an image source from the captured JPEG data
        CGImageSourceRef source = CGImageSourceCreateWithData((CFDataRef)imageData, NULL);
        // Get all the metadata in the image
        NSMutableDictionary *imageMetadata = [(NSDictionary *)CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(source, 0, NULL)) mutableCopy];

        // Create a CGImage
        CGImageRef cgImage = CGImageSourceCreateImageAtIndex(source, 0, NULL);

        // Rotate it
        CGImageRef rotatedCGImage;
        if ([options objectForKey:@"rotation"]) {
          float rotation = [[options objectForKey:@"rotation"] floatValue];
          rotatedCGImage = [self newCGImageRotatedByAngle:cgImage angle:rotation];
        } else if ([[options objectForKey:@"fixOrientation"] boolValue] == YES) {
          // Get metadata orientation
          int metadataOrientation = [[imageMetadata objectForKey:(NSString *)kCGImagePropertyOrientation] intValue];

          bool rotated = false;
          // See http://www.impulseadventure.com/photo/exif-orientation.html
          if (metadataOrientation == 6) {
            rotatedCGImage = [self newCGImageRotatedByAngle:cgImage angle:270];
            rotated = true;
          } else if (metadataOrientation == 3) {
            rotatedCGImage = [self newCGImageRotatedByAngle:cgImage angle:180];
            rotated = true;
          } else {
            rotatedCGImage = cgImage;
          }

          if (rotated) {
            [imageMetadata setObject:[NSNumber numberWithInteger:1] forKey:(NSString *)kCGImagePropertyOrientation];
            CGImageRelease(cgImage);
          }
        } else {
          rotatedCGImage = cgImage;
        }

        // Strip the TIFF dictionary so stale orientation values aren't written back
        [imageMetadata removeObjectForKey:(NSString *)kCGImagePropertyTIFFDictionary];

        // Add input metadata
        [imageMetadata mergeMetadata:[options objectForKey:@"metadata"]];

        // Create an image destination, re-encode the (possibly rotated) image and reattach the metadata
        NSMutableData *rotatedImageData = [NSMutableData data];
        CGImageDestinationRef destination = CGImageDestinationCreateWithData((CFMutableDataRef)rotatedImageData, CGImageSourceGetType(source), 1, NULL);
        CFRelease(source);
        CGImageDestinationAddImage(destination, rotatedCGImage, (CFDictionaryRef)imageMetadata);
        // And write
        CGImageDestinationFinalize(destination);
        CFRelease(destination);

        [self saveImage:rotatedImageData target:target metadata:imageMetadata resolve:resolve reject:reject];

        CGImageRelease(rotatedCGImage);
      }
      else {
        reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
      }
    }];
#endif
  });
}

- (void)saveImage:(NSData *)imageData target:(NSInteger)target metadata:(NSDictionary *)metadata resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject {
  NSString *responseString;

  if (target == RCTCameraCaptureTargetMemory) {
    resolve(@{@"data":[imageData base64EncodedStringWithOptions:0]});
    return;
  }

  else if (target == RCTCameraCaptureTargetDisk) {
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths firstObject];

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSString *fullPath = [[documentsDirectory stringByAppendingPathComponent:[[NSUUID UUID] UUIDString]] stringByAppendingPathExtension:@"jpg"];

    [fileManager createFileAtPath:fullPath contents:imageData attributes:nil];
    responseString = fullPath;
  }

  else if (target == RCTCameraCaptureTargetTemp) {
    NSString *fileName = [[NSProcessInfo processInfo] globallyUniqueString];
    NSString *fullPath = [NSString stringWithFormat:@"%@%@.jpg", NSTemporaryDirectory(), fileName];

    [imageData writeToFile:fullPath atomically:YES];
    responseString = fullPath;
  }

  else if (target == RCTCameraCaptureTargetCameraRoll) {
    [[[ALAssetsLibrary alloc] init] writeImageDataToSavedPhotosAlbum:imageData metadata:metadata completionBlock:^(NSURL *url, NSError *error) {
      if (error == nil) {
        // "path" isn't really applicable here (this is an asset URI), but it is kept for backward compatibility
        resolve(@{@"path":[url absoluteString], @"mediaUri":[url absoluteString]});
      }
      else {
        reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
      }
    }];
    return;
  }
  resolve(@{@"path":responseString});
}

- (CGImageRef)newCGImageRotatedByAngle:(CGImageRef)imgRef angle:(CGFloat)angle
{
  CGFloat angleInRadians = angle * (M_PI / 180);
  CGFloat width = CGImageGetWidth(imgRef);
  CGFloat height = CGImageGetHeight(imgRef);

  CGRect imgRect = CGRectMake(0, 0, width, height);
  CGAffineTransform transform = CGAffineTransformMakeRotation(angleInRadians);
  CGRect rotatedRect = CGRectApplyAffineTransform(imgRect, transform);

  CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
  CGContextRef bmContext = CGBitmapContextCreate(NULL, rotatedRect.size.width, rotatedRect.size.height, 8, 0, colorSpace, (CGBitmapInfo) kCGImageAlphaPremultipliedFirst);

  if (self.mirrorImage) {
    CGAffineTransform transform = CGAffineTransformMakeTranslation(rotatedRect.size.width, 0.0);
    transform = CGAffineTransformScale(transform, -1.0, 1.0);
    CGContextConcatCTM(bmContext, transform);
  }

  CGContextSetAllowsAntialiasing(bmContext, TRUE);
  CGContextSetInterpolationQuality(bmContext, kCGInterpolationNone);

  CGColorSpaceRelease(colorSpace);

  CGContextTranslateCTM(bmContext, +(rotatedRect.size.width/2), +(rotatedRect.size.height/2));
  CGContextRotateCTM(bmContext, angleInRadians);
  CGContextTranslateCTM(bmContext, -(rotatedRect.size.width/2), -(rotatedRect.size.height/2));

  CGContextDrawImage(bmContext, CGRectMake((rotatedRect.size.width-width)/2.0f, (rotatedRect.size.height-height)/2.0f, width, height), imgRef);

  CGImageRef rotatedImage = CGBitmapContextCreateImage(bmContext);
  CFRelease(bmContext);

  return rotatedImage;
}

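// Per the Core Foundation "Create/new" naming convention, the CGImageRef
// returned by -newCGImageRotatedByAngle:angle: is owned by the caller, which
// must balance it with CGImageRelease (as the still-capture path above does).
// A minimal usage sketch:
//
//   CGImageRef rotated = [self newCGImageRotatedByAngle:cgImage angle:90];
//   // ... encode or draw `rotated` here ...
//   CGImageRelease(rotated);
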
- (void)captureVideo:(NSInteger)target options:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject
{
  AVCaptureVideoOrientation orientation = options[@"orientation"] != nil ? [options[@"orientation"] integerValue] : self.orientation;
  if (orientation == RCTCameraOrientationAuto) {
    [self.sensorOrientationChecker getDeviceOrientationWithBlock:^(UIInterfaceOrientation orientation) {
      [self captureVideo:target options:options orientation:[self.sensorOrientationChecker convertToAVCaptureVideoOrientation:orientation] resolve:resolve reject:reject];
    }];
  } else {
    [self captureVideo:target options:options orientation:orientation resolve:resolve reject:reject];
  }
}

- (void)captureVideo:(NSInteger)target options:(NSDictionary *)options orientation:(AVCaptureVideoOrientation)orientation resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject
{
  if (self.movieFileOutput.recording) {
    reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(@"Already recording"));
    return;
  }

  if ([[options valueForKey:@"audio"] boolValue]) {
    [self initializeCaptureSessionInput:AVMediaTypeAudio];
  }

  Float64 totalSeconds = [[options valueForKey:@"totalSeconds"] floatValue];
  if (totalSeconds > -1) {
    int32_t preferredTimeScale = [[options valueForKey:@"preferredTimeScale"] intValue];
    CMTime maxDuration = CMTimeMakeWithSeconds(totalSeconds, preferredTimeScale);
    self.movieFileOutput.maxRecordedDuration = maxDuration;
  }

  dispatch_async(self.sessionQueue, ^{
    [[self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:orientation];

    // Create temporary URL to record to
    NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
    NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath]) {
      NSError *error;
      if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
        reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
        return;
      }
    }

    // Start recording
    [self.movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];

    self.videoResolve = resolve;
    self.videoReject = reject;
    self.videoTarget = target;
  });
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray *)connections
                error:(NSError *)error
{
  BOOL recordSuccess = YES;
  if ([error code] != noErr) {
    // A problem occurred: find out if the recording was successful.
    id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
    if (value) {
      recordSuccess = [value boolValue];
    }
  }
  if (!recordSuccess) {
    self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(@"Error while recording"));
    return;
  }

  AVURLAsset *videoAsAsset = [AVURLAsset URLAssetWithURL:outputFileURL options:nil];
  AVAssetTrack *videoTrack = [[videoAsAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
  float videoWidth;
  float videoHeight;

  CGSize videoSize = [videoTrack naturalSize];
  CGAffineTransform txf = [videoTrack preferredTransform];

  if ((txf.tx == videoSize.width && txf.ty == videoSize.height) || (txf.tx == 0 && txf.ty == 0)) {
    // Video recorded in landscape orientation
    videoWidth = videoSize.width;
    videoHeight = videoSize.height;
  } else {
    // Video recorded in portrait orientation, so have to swap reported width/height
    videoWidth = videoSize.height;
    videoHeight = videoSize.width;
  }

  NSMutableDictionary *videoInfo = [NSMutableDictionary dictionaryWithDictionary:@{
    @"duration":[NSNumber numberWithFloat:CMTimeGetSeconds(videoAsAsset.duration)],
    @"width":[NSNumber numberWithFloat:videoWidth],
    @"height":[NSNumber numberWithFloat:videoHeight],
    @"size":[NSNumber numberWithLongLong:captureOutput.recordedFileSize],
  }];

  if (self.videoTarget == RCTCameraCaptureTargetCameraRoll) {
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL]) {
      [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
                                  completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
          self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
          return;
        }
        [videoInfo setObject:[assetURL absoluteString] forKey:@"path"];
        self.videoResolve(videoInfo);
      }];
    }
  }
  else if (self.videoTarget == RCTCameraCaptureTargetDisk) {
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths firstObject];
    NSString *fullPath = [[documentsDirectory stringByAppendingPathComponent:[[NSUUID UUID] UUIDString]] stringByAppendingPathExtension:@"mov"];

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSError *error = nil;

    // Move the recording to its destination
    if (!([fileManager moveItemAtPath:[outputFileURL path] toPath:fullPath error:&error])) {
      self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
      return;
    }
    [videoInfo setObject:fullPath forKey:@"path"];
    self.videoResolve(videoInfo);
  }
  else if (self.videoTarget == RCTCameraCaptureTargetTemp) {
    NSString *fileName = [[NSProcessInfo processInfo] globallyUniqueString];
    NSString *fullPath = [NSString stringWithFormat:@"%@%@.mov", NSTemporaryDirectory(), fileName];

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSError *error = nil;

    // Move the recording to its destination
    if (!([fileManager moveItemAtPath:[outputFileURL path] toPath:fullPath error:&error])) {
      self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
      return;
    }
    [videoInfo setObject:fullPath forKey:@"path"];
    self.videoResolve(videoInfo);
  }
  else {
    self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(@"Target not supported"));
  }
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {

  for (AVMetadataMachineReadableCodeObject *metadata in metadataObjects) {
    for (id barcodeType in self.barCodeTypes) {
      if ([metadata.type isEqualToString:barcodeType]) {
        // Transform the meta-data coordinates to screen coords
        AVMetadataMachineReadableCodeObject *transformed = (AVMetadataMachineReadableCodeObject *)[_previewLayer transformedMetadataObjectForMetadataObject:metadata];

        NSDictionary *event = @{
          @"type": metadata.type,
          @"data": metadata.stringValue,
          @"bounds": @{
            @"origin": @{
              @"x": [NSString stringWithFormat:@"%f", transformed.bounds.origin.x],
              @"y": [NSString stringWithFormat:@"%f", transformed.bounds.origin.y]
            },
            @"size": @{
              @"height": [NSString stringWithFormat:@"%f", transformed.bounds.size.height],
              @"width": [NSString stringWithFormat:@"%f", transformed.bounds.size.width],
            }
          }
        };

        [self.bridge.eventDispatcher sendAppEventWithName:@"CameraBarCodeRead" body:event];
      }
    }
  }
}

- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
  NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
  AVCaptureDevice *captureDevice = [devices firstObject];

  for (AVCaptureDevice *device in devices)
  {
    if ([device position] == position)
    {
      captureDevice = device;
      break;
    }
  }

  return captureDevice;
}

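// A minimal sketch of how this helper is used elsewhere in this file (e.g. by
// the `type` view property and initializeCaptureSessionInput:): request a
// position and fall back to the first available device when nothing matches.
//
//   AVCaptureDevice *front = [self deviceWithMediaType:AVMediaTypeVideo
//                               preferringPosition:AVCaptureDevicePositionFront];
//   // `front` is the front camera if present, otherwise the default video device.
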
- (void)subjectAreaDidChange:(NSNotification *)notification
{
  CGPoint devicePoint = CGPointMake(.5, .5);
  [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
}

- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
  dispatch_async([self sessionQueue], ^{
    AVCaptureDevice *device = [[self videoCaptureDeviceInput] device];
    NSError *error = nil;
    if ([device lockForConfiguration:&error])
    {
      if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode])
      {
        [device setFocusMode:focusMode];
        [device setFocusPointOfInterest:point];
      }
      if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode])
      {
        [device setExposureMode:exposureMode];
        [device setExposurePointOfInterest:point];
      }
      [device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
      [device unlockForConfiguration];
    }
    else
    {
      NSLog(@"%@", error);
    }
  });
}

- (void)focusAtThePoint:(CGPoint)atPoint
{
  Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice");
  if (captureDeviceClass != nil) {
    dispatch_async([self sessionQueue], ^{
      AVCaptureDevice *device = [[self videoCaptureDeviceInput] device];
      if ([device isFocusPointOfInterestSupported] &&
          [device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
        // Convert the touch point from camera-view coordinates to the {0, 1}
        // range that AVFoundation expects for points of interest, using the
        // bounds of the camera view rather than the full screen.
        CGRect cameraViewRect = [[self camera] bounds];
        double cameraViewWidth = cameraViewRect.size.width;
        double cameraViewHeight = cameraViewRect.size.height;
        double focus_x = atPoint.x / cameraViewWidth;
        double focus_y = atPoint.y / cameraViewHeight;
        CGPoint cameraViewPoint = CGPointMake(focus_x, focus_y);
        if ([device lockForConfiguration:nil]) {
          [device setFocusPointOfInterest:cameraViewPoint];
          [device setFocusMode:AVCaptureFocusModeAutoFocus];
          if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:AVCaptureExposureModeAutoExpose]) {
            [device setExposureMode:AVCaptureExposureModeAutoExpose];
            [device setExposurePointOfInterest:cameraViewPoint];
          }
          [device unlockForConfiguration];
        }
      }
    });
  }
}

- (void)zoom:(CGFloat)velocity reactTag:(NSNumber *)reactTag {
  if (isnan(velocity)) {
    return;
  }
  const CGFloat pinchVelocityDividerFactor = 20.0f; // TODO: calibrate or make this component's property
  NSError *error = nil;
  AVCaptureDevice *device = [[self videoCaptureDeviceInput] device];
  if ([device lockForConfiguration:&error]) {
    CGFloat zoomFactor = device.videoZoomFactor + atan(velocity / pinchVelocityDividerFactor);
    if (zoomFactor > device.activeFormat.videoMaxZoomFactor) {
      zoomFactor = device.activeFormat.videoMaxZoomFactor;
    } else if (zoomFactor < 1) {
      zoomFactor = 1.0f;
    }

    NSDictionary *event = @{
      @"target": reactTag,
      @"zoomFactor": [NSNumber numberWithDouble:zoomFactor],
      @"velocity": [NSNumber numberWithDouble:velocity]
    };

    [self.bridge.eventDispatcher sendInputEventWithName:@"zoomChanged" body:event];

    device.videoZoomFactor = zoomFactor;
    [device unlockForConfiguration];
  } else {
    NSLog(@"error: %@", error);
  }
}

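// The velocity argument is expected to come from a pinch gesture in the camera
// view; this manager only clamps the resulting zoom factor and emits the
// "zoomChanged" event. A hypothetical caller for illustration (the real
// gesture recognizer lives in RCTCamera; the handler name here is assumed):
//
//   - (void)handlePinchGesture:(UIPinchGestureRecognizer *)recognizer {
//     if (recognizer.state == UIGestureRecognizerStateChanged) {
//       [self.manager zoom:recognizer.velocity reactTag:self.reactTag];
//     }
//   }
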
- (void)setCaptureQuality:(NSString *)quality
{
#if !(TARGET_IPHONE_SIMULATOR)
  if (quality) {
    [self.session beginConfiguration];
    if ([self.session canSetSessionPreset:quality]) {
      self.session.sessionPreset = quality;
    }
    [self.session commitConfiguration];
  }
#endif
}

@end