implemented method getFOV for iOS

WellDone2044 2015-11-02 15:08:17 +01:00
parent edbbcd75a3
commit 2462fb8ada
1 changed file with 44 additions and 12 deletions


@@ -232,16 +232,48 @@ RCT_EXPORT_METHOD(stopCapture) {
  }
}

RCT_EXPORT_METHOD(getFOV:(RCTResponseSenderBlock)callback) {
  NSArray *devices = [AVCaptureDevice devices];
  AVCaptureDevice *frontCamera;
  AVCaptureDevice *backCamera;
  // Initialize to 0 so the callback never reads uninitialized memory when a
  // camera is missing (e.g. single-camera devices or the simulator).
  double frontFov = 0.0;
  double backFov = 0.0;

  for (AVCaptureDevice *device in devices) {
    NSLog(@"Device name: %@", [device localizedName]);

    if ([device hasMediaType:AVMediaTypeVideo]) {
      // videoFieldOfView is the horizontal field of view of the device's
      // active format, in degrees.
      if ([device position] == AVCaptureDevicePositionBack) {
        NSLog(@"Device position : back");
        backCamera = device;
        backFov = backCamera.activeFormat.videoFieldOfView;
      }
      else {
        NSLog(@"Device position : front");
        frontCamera = device;
        frontFov = frontCamera.activeFormat.videoFieldOfView;
      }
    }
  }

  callback(@[[NSNull null], @{
    @"backCamera": [NSNumber numberWithDouble:backFov],
    @"frontCamera": [NSNumber numberWithDouble:frontFov]
  }]);
}
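
Note on the callback convention: RCT_EXPORT_METHOD exposes getFOV to JavaScript, and by React Native convention the first element of the RCTResponseSenderBlock array is an error (or NSNull) followed by the results. A minimal sketch of a guard that could precede the callback above, reporting an error instead of 0-degree values when no camera exists (the guard and its message are illustrative, not part of this commit):

    // Sketch only: fail the callback when neither camera was discovered
    // (e.g. in the simulator) instead of returning 0-degree FOVs.
    if (frontCamera == nil && backCamera == nil) {
      callback(@[@{@"message": @"No video capture devices available"}]);
      return;
    }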

- (void)startSession {
#if TARGET_IPHONE_SIMULATOR
  return;
#endif
  dispatch_async(self.sessionQueue, ^{
    if (self.presetCamera == AVCaptureDevicePositionUnspecified) {
      self.presetCamera = AVCaptureDevicePositionBack;
    }

    AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    if ([self.session canAddOutput:stillImageOutput])
    {
@@ -249,14 +281,14 @@ RCT_EXPORT_METHOD(stopCapture) {
      [self.session addOutput:stillImageOutput];
      self.stillImageOutput = stillImageOutput;
    }

    AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    if ([self.session canAddOutput:movieFileOutput])
    {
      [self.session addOutput:movieFileOutput];
      self.movieFileOutput = movieFileOutput;
    }

    AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
    if ([self.session canAddOutput:metadataOutput]) {
      [metadataOutput setMetadataObjectsDelegate:self queue:self.sessionQueue];
@@ -264,7 +296,7 @@ RCT_EXPORT_METHOD(stopCapture) {
      [metadataOutput setMetadataObjectTypes:metadataOutput.availableMetadataObjectTypes];
      self.metadataOutput = metadataOutput;
    }

    // The block captures a weak reference to avoid a retain cycle between
    // the manager and its runtime-error observer.
    __weak RCTCameraManager *weakSelf = self;
    [self setRuntimeErrorHandlingObserver:[NSNotificationCenter.defaultCenter addObserverForName:AVCaptureSessionRuntimeErrorNotification object:self.session queue:nil usingBlock:^(NSNotification *note) {
      RCTCameraManager *strongSelf = weakSelf;
@@ -273,7 +305,7 @@ RCT_EXPORT_METHOD(stopCapture) {
        [strongSelf.session startRunning];
      });
    }]];

    [self.session startRunning];
  });
}
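
The method above does all of its work on self.sessionQueue, which is created elsewhere in the manager and does not appear in this diff. For the beginConfiguration and start/stop calls to be serialized safely it is presumably a serial queue, created along these lines (the queue label is illustrative):

    // Assumption: a dedicated serial queue so that session configuration and
    // start/stop calls never race one another.
    self.sessionQueue = dispatch_queue_create("cameraManagerQueue", DISPATCH_QUEUE_SERIAL);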
@@ -282,7 +314,7 @@ RCT_EXPORT_METHOD(stopCapture) {
#if TARGET_IPHONE_SIMULATOR
  return;
#endif
  dispatch_async(self.sessionQueue, ^{
    self.camera = nil;
    [self.previewLayer removeFromSuperlayer];
@@ -290,7 +322,7 @@ RCT_EXPORT_METHOD(stopCapture) {
    for (AVCaptureInput *input in self.session.inputs) {
      [self.session removeInput:input];
    }

    for (AVCaptureOutput *output in self.session.outputs) {
      [self.session removeOutput:output];
    }
@@ -299,19 +331,19 @@ RCT_EXPORT_METHOD(stopCapture) {
- (void)initializeCaptureSessionInput:(NSString *)type {
  dispatch_async(self.sessionQueue, ^{
    [self.session beginConfiguration];

    NSError *error = nil;
    AVCaptureDevice *captureDevice;

    // Compare string contents rather than pointers; the AVMediaType
    // constants are plain NSStrings.
    if ([type isEqualToString:AVMediaTypeAudio]) {
      captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    }
    else if ([type isEqualToString:AVMediaTypeVideo]) {
      captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:self.presetCamera];
    }

    if (captureDevice == nil) {
      return;
    }
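
The deviceWithMediaType:preferringPosition: helper referenced above lies outside this hunk. A typical implementation, following the pattern in Apple's AVCam sample (a sketch, not necessarily the code in this repository), falls back to the first available device when none matches the requested position:

    - (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
    {
      NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
      // Default to the first device so a camera is still returned when the
      // preferred position is unavailable.
      AVCaptureDevice *captureDevice = [devices firstObject];

      for (AVCaptureDevice *device in devices) {
        if ([device position] == position) {
          captureDevice = device;
          break;
        }
      }
      return captureDevice;
    }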