I believe this fixes issues #52 and #58. There are a lot of changed lines here because of a whitespace formatting change; the substantive changes are comparatively small.
This commit is contained in:
parent 87a8389c08
commit 65b5033596
@@ -27,6 +27,7 @@ var Camera = React.createClass({
     PropTypes.string,
     PropTypes.number
   ]),
+  captureAudio: PropTypes.bool,
   captureMode: PropTypes.oneOfType([
     PropTypes.string,
     PropTypes.number

@@ -65,6 +66,7 @@ var Camera = React.createClass({
     aspect: constants.Aspect.fill,
     type: constants.Type.back,
     orientation: constants.Orientation.auto,
+    captureAudio: true,
     captureMode: constants.CaptureMode.still,
     captureTarget: constants.CaptureTarget.memory,
     flashMode: constants.FlashMode.off,

@@ -185,6 +187,7 @@ var Camera = React.createClass({
   }

   options = Object.assign({}, {
+    audio: this.props.captureAudio,
     mode: this.props.captureMode,
     target: this.props.captureTarget
   }, options);

@@ -207,8 +210,10 @@ var Camera = React.createClass({
   },

   stopCapture() {
-    this.setState({ isRecording: false });
-    NativeModules.CameraManager.stopCapture();
+    if (this.state.isRecording) {
+      NativeModules.CameraManager.stopCapture();
+      this.setState({ isRecording: false });
+    }
   }

 });
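Taken together, these JS changes mean audio capture can be toggled from a prop and `stopCapture()` only touches the native module while a recording is in progress. A minimal usage sketch, assuming the `Camera` component and ref-based API this module's README describes; the component and method names `Recorder`, `start`, and `stop` are illustrative only:

```js
var React = require('react-native');
var Camera = require('react-native-camera');

var Recorder = React.createClass({
  render() {
    // captureAudio is the prop added by this commit; it defaults to true.
    return (
      <Camera
        ref="camera"
        captureMode={Camera.constants.CaptureMode.video}
        captureTarget={Camera.constants.CaptureTarget.disk}
        captureAudio={false}
      />
    );
  },

  start() {
    // capture() now merges the audio/mode/target props into the options
    // object it forwards to the native CameraManager.
    this.refs.camera.capture(function(err, data) {
      console.log(err, data);
    });
  },

  stop() {
    // Safe to call even when nothing is recording: stopCapture() is now
    // guarded by this.state.isRecording.
    this.refs.camera.stopCapture();
  }
});
```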
92
RCTCamera.m
@@ -10,81 +10,83 @@

 - (void)setAspect:(NSInteger)aspect
 {
   NSString *aspectString;
   switch (aspect) {
     default:
     case RCTCameraAspectFill:
       aspectString = AVLayerVideoGravityResizeAspectFill;
       break;
     case RCTCameraAspectFit:
       aspectString = AVLayerVideoGravityResizeAspect;
       break;
     case RCTCameraAspectStretch:
       aspectString = AVLayerVideoGravityResize;
       break;
   }
   [self.manager changeAspect:aspectString];
 }

 - (void)setType:(NSInteger)type
 {
   if (self.manager.session.isRunning) {
     [self.manager changeCamera:type];
   }
   else {
     self.manager.presetCamera = type;
   }
+  [self.manager initializeCaptureSessionInput:AVMediaTypeVideo];
 }

 - (void)setOrientation:(NSInteger)orientation
 {
   if (orientation == RCTCameraOrientationAuto) {
     [self.manager changeOrientation:[UIApplication sharedApplication].statusBarOrientation];
     [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(orientationChanged:) name:UIDeviceOrientationDidChangeNotification object:nil];
   }
   else {
     [[NSNotificationCenter defaultCenter]removeObserver:self name:UIDeviceOrientationDidChangeNotification object:nil];
     [self.manager changeOrientation:orientation];
   }
 }

 - (void)setFlashMode:(NSInteger)flashMode
 {
   [self.manager changeFlashMode:flashMode];
 }

 - (void)setTorchMode:(NSInteger)torchMode
 {
   [self.manager changeTorchMode:torchMode];
 }

 - (id)initWithManager:(RCTCameraManager*)manager
 {
   if ((self = [super init])) {
     self.manager = manager;
+    [self.manager initializeCaptureSessionInput:AVMediaTypeVideo];
   }
   return self;
 }

 - (void)layoutSubviews
 {
   [super layoutSubviews];
   self.manager.previewLayer.frame = self.bounds;
   [self setBackgroundColor:[UIColor blackColor]];
   [self.layer insertSublayer:self.manager.previewLayer atIndex:0];
 }

 - (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex
 {
   [self insertSubview:view atIndex:atIndex + 1];
   return;
 }

 - (void)removeReactSubview:(UIView *)subview
 {
   [subview removeFromSuperview];
   return;
 }

 - (void)removeFromSuperview

@@ -94,8 +96,8 @@
 }

 - (void)orientationChanged:(NSNotification *)notification{
   UIInterfaceOrientation orientation = [[UIApplication sharedApplication] statusBarOrientation];
   [self.manager changeOrientation:orientation];
 }

 @end
RCTCameraManager.h

@@ -4,52 +4,53 @@
 @class RCTCamera;

 typedef NS_ENUM(NSInteger, RCTCameraAspect) {
   RCTCameraAspectFill = 0,
   RCTCameraAspectFit = 1,
   RCTCameraAspectStretch = 2
 };

 typedef NS_ENUM(NSInteger, RCTCameraCaptureMode) {
   RCTCameraCaptureModeStill = 0,
   RCTCameraCaptureModeVideo = 1
 };

 typedef NS_ENUM(NSInteger, RCTCameraCaptureTarget) {
   RCTCameraCaptureTargetMemory = 0,
   RCTCameraCaptureTargetDisk = 1,
   RCTCameraCaptureTargetCameraRoll = 2
 };

 typedef NS_ENUM(NSInteger, RCTCameraOrientation) {
   RCTCameraOrientationAuto = 0,
   RCTCameraOrientationLandscapeLeft = AVCaptureVideoOrientationLandscapeLeft,
   RCTCameraOrientationLandscapeRight = AVCaptureVideoOrientationLandscapeRight,
   RCTCameraOrientationPortrait = AVCaptureVideoOrientationPortrait,
   RCTCameraOrientationPortraitUpsideDown = AVCaptureVideoOrientationPortraitUpsideDown
 };

 typedef NS_ENUM(NSInteger, RCTCameraType) {
   RCTCameraTypeFront = AVCaptureDevicePositionFront,
   RCTCameraTypeBack = AVCaptureDevicePositionBack
 };

 typedef NS_ENUM(NSInteger, RCTCameraFlashMode) {
   RCTCameraFlashModeOff = AVCaptureFlashModeOff,
   RCTCameraFlashModeOn = AVCaptureFlashModeOn,
   RCTCameraFlashModeAuto = AVCaptureFlashModeAuto
 };

 typedef NS_ENUM(NSInteger, RCTCameraTorchMode) {
   RCTCameraTorchModeOff = AVCaptureTorchModeOff,
   RCTCameraTorchModeOn = AVCaptureTorchModeOn,
   RCTCameraTorchModeAuto = AVCaptureTorchModeAuto
 };

 @interface RCTCameraManager : RCTViewManager<AVCaptureMetadataOutputObjectsDelegate, AVCaptureFileOutputRecordingDelegate>

 @property (nonatomic) dispatch_queue_t sessionQueue;
 @property (nonatomic) AVCaptureSession *session;
-@property (nonatomic) AVCaptureDeviceInput *captureDeviceInput;
+@property (nonatomic) AVCaptureDeviceInput *audioCaptureDeviceInput;
+@property (nonatomic) AVCaptureDeviceInput *videoCaptureDeviceInput;
 @property (nonatomic) AVCaptureStillImageOutput *stillImageOutput;
 @property (nonatomic) AVCaptureMovieFileOutput *movieFileOutput;
 @property (nonatomic) AVCaptureMetadataOutput *metadataOutput;

@@ -66,6 +67,7 @@ typedef NS_ENUM(NSInteger, RCTCameraTorchMode) {
 - (void)changeTorchMode:(NSInteger)torchMode;
 - (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position;
 - (void)capture:(NSDictionary*)options callback:(RCTResponseSenderBlock)callback;
+- (void)initializeCaptureSessionInput:(NSString*)type;
 - (void)stopCapture;

 @end
RCTCameraManager.m

@@ -15,7 +15,7 @@ RCT_EXPORT_MODULE();

 - (UIView *)view
 {
   return [[RCTCamera alloc] initWithManager:self];
 }

 RCT_EXPORT_VIEW_PROPERTY(aspect, NSInteger);
@@ -26,332 +26,351 @@ RCT_EXPORT_VIEW_PROPERTY(torchMode, NSInteger);

 - (NSDictionary *)constantsToExport
 {
   return @{
     @"Aspect": @{
       @"stretch": @(RCTCameraAspectStretch),
       @"fit": @(RCTCameraAspectFit),
       @"fill": @(RCTCameraAspectFill)
     },
     @"BarCodeType": @{
       @"upce": AVMetadataObjectTypeUPCECode,
       @"code39": AVMetadataObjectTypeCode39Code,
       @"code39mod43": AVMetadataObjectTypeCode39Mod43Code,
       @"ean13": AVMetadataObjectTypeEAN13Code,
       @"ean8": AVMetadataObjectTypeEAN8Code,
       @"code93": AVMetadataObjectTypeCode93Code,
       @"code138": AVMetadataObjectTypeCode128Code,
       @"pdf417": AVMetadataObjectTypePDF417Code,
       @"qr": AVMetadataObjectTypeQRCode,
       @"aztec": AVMetadataObjectTypeAztecCode
     },
     @"Type": @{
       @"front": @(RCTCameraTypeFront),
       @"back": @(RCTCameraTypeBack)
     },
     @"CaptureMode": @{
       @"still": @(RCTCameraCaptureModeStill),
       @"video": @(RCTCameraCaptureModeVideo)
     },
     @"CaptureTarget": @{
       @"memory": @(RCTCameraCaptureTargetMemory),
       @"disk": @(RCTCameraCaptureTargetDisk),
       @"cameraRoll": @(RCTCameraCaptureTargetCameraRoll)
     },
     @"Orientation": @{
       @"auto": @(RCTCameraOrientationAuto),
       @"landscapeLeft": @(RCTCameraOrientationLandscapeLeft),
       @"landscapeRight": @(RCTCameraOrientationLandscapeRight),
       @"portrait": @(RCTCameraOrientationPortrait),
       @"portraitUpsideDown": @(RCTCameraOrientationPortraitUpsideDown)
     },
     @"FlashMode": @{
       @"off": @(RCTCameraFlashModeOff),
       @"on": @(RCTCameraFlashModeOn),
       @"auto": @(RCTCameraFlashModeAuto)
     },
     @"TorchMode": @{
       @"off": @(RCTCameraTorchModeOff),
       @"on": @(RCTCameraTorchModeOn),
       @"auto": @(RCTCameraTorchModeAuto)
     }
   };
 }
 - (id)init {

   if ((self = [super init])) {

     self.session = [AVCaptureSession new];
     self.session.sessionPreset = AVCaptureSessionPresetHigh;

     self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
     self.previewLayer.needsDisplayOnBoundsChange = YES;

     self.sessionQueue = dispatch_queue_create("cameraManagerQueue", DISPATCH_QUEUE_SERIAL);

     dispatch_async(self.sessionQueue, ^{
-      NSError *error = nil;

       if (self.presetCamera == AVCaptureDevicePositionUnspecified) {
         self.presetCamera = AVCaptureDevicePositionBack;
       }

-      AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:self.presetCamera];
-      if (captureDevice != nil) {
-        AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
-
-        if (error)
-        {
-          NSLog(@"%@", error);
-        }
-
-        if ([self.session canAddInput:captureDeviceInput])
-        {
-          [self.session addInput:captureDeviceInput];
-          self.captureDeviceInput = captureDeviceInput;
-        }
-      }
-
-      AVCaptureDevice *audioCaptureDevice = [self deviceWithMediaType:AVMediaTypeAudio preferringPosition:self.presetCamera];
-      if (audioCaptureDevice != nil) {
-        AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
-
-        if (error)
-        {
-          NSLog(@"%@", error);
-        }
-
-        if ([self.session canAddInput:audioInput])
-        {
-          [self.session addInput:audioInput];
-        }
-      }
-
       AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
       if ([self.session canAddOutput:stillImageOutput])
       {
         stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
         [self.session addOutput:stillImageOutput];
         self.stillImageOutput = stillImageOutput;
       }

       AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
       if ([self.session canAddOutput:movieFileOutput])
       {
         [self.session addOutput:movieFileOutput];
         self.movieFileOutput = movieFileOutput;
       }

       AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
       if ([self.session canAddOutput:metadataOutput]) {
         [metadataOutput setMetadataObjectsDelegate:self queue:self.sessionQueue];
         [self.session addOutput:metadataOutput];
         [metadataOutput setMetadataObjectTypes:metadataOutput.availableMetadataObjectTypes];
         self.metadataOutput = metadataOutput;
       }

       __weak RCTCameraManager *weakSelf = self;
       [self setRuntimeErrorHandlingObserver:[NSNotificationCenter.defaultCenter addObserverForName:AVCaptureSessionRuntimeErrorNotification object:self.session queue:nil usingBlock:^(NSNotification *note) {
         RCTCameraManager *strongSelf = weakSelf;
         dispatch_async(strongSelf.sessionQueue, ^{
           // Manually restarting the session since it must have been stopped due to an error.
           [strongSelf.session startRunning];
         });
       }]];

       [self.session startRunning];
     });
   }
   return self;
 }
 RCT_EXPORT_METHOD(checkDeviceAuthorizationStatus:(RCTResponseSenderBlock) callback)
 {
   NSString *mediaType = AVMediaTypeVideo;

   [AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
     callback(@[[NSNull null], @(granted)]);
   }];
 }
 RCT_EXPORT_METHOD(changeCamera:(NSInteger)camera) {
-  AVCaptureDevice *currentCaptureDevice = [self.captureDeviceInput device];
+  dispatch_async(self.sessionQueue, ^{
+    AVCaptureDevice *currentCaptureDevice = [self.videoCaptureDeviceInput device];
     AVCaptureDevicePosition position = (AVCaptureDevicePosition)camera;
     AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:(AVCaptureDevicePosition)position];

     if (captureDevice == nil) {
       return;
     }

     self.presetCamera = camera;

     NSError *error = nil;
     AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];

-    if (error)
+    if (error || captureDeviceInput == nil)
     {
       NSLog(@"%@", error);
       return;
     }

     [self.session beginConfiguration];

-    [self.session removeInput:self.captureDeviceInput];
+    [self.session removeInput:self.videoCaptureDeviceInput];

     if ([self.session canAddInput:captureDeviceInput])
     {
+      [self.session addInput:captureDeviceInput];
+
       [NSNotificationCenter.defaultCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentCaptureDevice];

       [NSNotificationCenter.defaultCenter addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
-      [self.session addInput:captureDeviceInput];
-      self.captureDeviceInput = captureDeviceInput;
+      self.videoCaptureDeviceInput = captureDeviceInput;
     }
     else
     {
-      [self.session addInput:self.captureDeviceInput];
+      [self.session addInput:self.videoCaptureDeviceInput];
     }

     [self.session commitConfiguration];
+  });
 }
 RCT_EXPORT_METHOD(changeAspect:(NSString *)aspect) {
   self.previewLayer.videoGravity = aspect;
 }
 RCT_EXPORT_METHOD(changeFlashMode:(NSInteger)flashMode) {
-  AVCaptureDevice *device = [self.captureDeviceInput device];
+  AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
   NSError *error = nil;

   if (![device hasFlash]) return;
   if (![device lockForConfiguration:&error]) {
     NSLog(@"%@", error);
     return;
   }
   [self setFlashMode:flashMode forDevice:device];
   [device unlockForConfiguration];
 }
 RCT_EXPORT_METHOD(changeOrientation:(NSInteger)orientation) {
   self.previewLayer.connection.videoOrientation = orientation;
 }
 RCT_EXPORT_METHOD(changeTorchMode:(NSInteger)torchMode) {
-  AVCaptureDevice *device = [self.captureDeviceInput device];
+  AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
   NSError *error = nil;

   if (![device hasTorch]) return;
   if (![device lockForConfiguration:&error]) {
     NSLog(@"%@", error);
     return;
   }
   [device setTorchMode: torchMode];
   [device unlockForConfiguration];
 }
 RCT_EXPORT_METHOD(capture:(NSDictionary *)options callback:(RCTResponseSenderBlock)callback) {
   NSInteger captureMode = [[options valueForKey:@"mode"] intValue];
   NSInteger captureTarget = [[options valueForKey:@"target"] intValue];

   if (captureMode == RCTCameraCaptureModeStill) {
-    [self captureStill:captureTarget callback:callback];
+    [self captureStill:captureTarget options:options callback:callback];
   }
   else if (captureMode == RCTCameraCaptureModeVideo) {
-    if (self.movieFileOutput.recording) {
-      callback(@[RCTMakeError(@"Already Recording", nil, nil)]);
-      return;
-    }
-
-    Float64 totalSeconds = [[options valueForKey:@"totalSeconds"] floatValue];
-    if (totalSeconds > -1) {
-      int32_t preferredTimeScale = [[options valueForKey:@"preferredTimeScale"] intValue];
-      CMTime maxDuration = CMTimeMakeWithSeconds(totalSeconds, preferredTimeScale);
-      self.movieFileOutput.maxRecordedDuration = maxDuration;
-    }
-
-    [self captureVideo:captureTarget callback:callback];
+    [self captureVideo:captureTarget options:options callback:callback];
   }
 }
 RCT_EXPORT_METHOD(stopCapture) {
   if (self.movieFileOutput.recording) {
     [self.movieFileOutput stopRecording];
   }
 }
-- (void)captureStill:(NSInteger)target callback:(RCTResponseSenderBlock)callback {
+- (void)initializeCaptureSessionInput:(NSString *)type {
+  dispatch_async(self.sessionQueue, ^{
+    NSError *error = nil;
+    AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:self.presetCamera];
+    if (captureDevice == nil) {
+      return;
+    }
+
+    AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
+
+    if (error || captureDeviceInput == nil) {
+      NSLog(@"%@", error);
+      return;
+    }
+
+    [self.session beginConfiguration];
+
+    if (type == AVMediaTypeAudio) {
+      [self.session removeInput:self.audioCaptureDeviceInput];
+    }
+    else if (type == AVMediaTypeVideo) {
+      [self.session removeInput:self.videoCaptureDeviceInput];
+    }
+
+    if ([self.session canAddInput:captureDeviceInput]) {
+      [self.session addInput:captureDeviceInput];
+
+      if (type == AVMediaTypeAudio) {
+        self.audioCaptureDeviceInput = captureDeviceInput;
+      }
+      else if (type == AVMediaTypeVideo) {
+        self.videoCaptureDeviceInput = captureDeviceInput;
+      }
+    }
+
+    [self.session commitConfiguration];
+  });
+}
+
+- (void)captureStill:(NSInteger)target options:(NSDictionary *)options callback:(RCTResponseSenderBlock)callback {
   if ([[[UIDevice currentDevice].model lowercaseString] rangeOfString:@"simulator"].location != NSNotFound){

     CGSize size = CGSizeMake(720, 1280);
     UIGraphicsBeginImageContextWithOptions(size, YES, 0);
     [[UIColor whiteColor] setFill];
     UIRectFill(CGRectMake(0, 0, size.width, size.height));
     UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
     UIGraphicsEndImageContext();

     [self storeImage:image target:target callback:callback];

   } else {

     [[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:self.previewLayer.connection.videoOrientation];

     [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
       NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
       UIImage *image = [UIImage imageWithData:imageData];
       if (image)
       {
         [self storeImage:image target:target callback:callback];
       }
       else {
         callback(@[RCTMakeError(error.description, nil, nil)]);
       }
     }];
   }
 }
 - (void)storeImage:(UIImage*)image target:(NSInteger)target callback:(RCTResponseSenderBlock)callback {
   UIImage *rotatedImage = [image resizedImage:CGSizeMake(image.size.width, image.size.height) interpolationQuality:kCGInterpolationDefault];

   NSString *responseString;

   if (target == RCTCameraCaptureTargetMemory) {
     responseString = [UIImageJPEGRepresentation(rotatedImage, 1.0) base64EncodedStringWithOptions:0];
   }
   else if (target == RCTCameraCaptureTargetDisk) {
     responseString = [self saveImage:rotatedImage withName:[[NSUUID UUID] UUIDString]];
   }
   else if (target == RCTCameraCaptureTargetCameraRoll) {
     [[[ALAssetsLibrary alloc] init] writeImageToSavedPhotosAlbum:rotatedImage.CGImage metadata:nil completionBlock:^(NSURL* url, NSError* error) {
       if (error == nil) {
         callback(@[[NSNull null], [url absoluteString]]);
       }
       else {
         callback(@[RCTMakeError(error.description, nil, nil)]);
       }
     }];
     return;
   }
   callback(@[[NSNull null], responseString]);
 }
--(void)captureVideo:(NSInteger)target callback:(RCTResponseSenderBlock)callback {
+-(void)captureVideo:(NSInteger)target options:(NSDictionary *)options callback:(RCTResponseSenderBlock)callback {

+  if (self.movieFileOutput.recording) {
+    callback(@[RCTMakeError(@"Already Recording", nil, nil)]);
+    return;
+  }
+
+  if ([options valueForKey:@"audio"]) {
+    [self initializeCaptureSessionInput:AVMediaTypeAudio];
+  }
+
+  Float64 totalSeconds = [[options valueForKey:@"totalSeconds"] floatValue];
+  if (totalSeconds > -1) {
+    int32_t preferredTimeScale = [[options valueForKey:@"preferredTimeScale"] intValue];
+    CMTime maxDuration = CMTimeMakeWithSeconds(totalSeconds, preferredTimeScale);
+    self.movieFileOutput.maxRecordedDuration = maxDuration;
+  }
+
   dispatch_async(self.sessionQueue, ^{
     [[self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:self.previewLayer.connection.videoOrientation];

     //Create temporary URL to record to
     NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
     NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
     NSFileManager *fileManager = [NSFileManager defaultManager];
     if ([fileManager fileExistsAtPath:outputPath]) {
       NSError *error;
       if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
         callback(@[RCTMakeError(error.description, nil, nil)]);
         return;
       }
     }

     //Start recording
     [self.movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];

     self.videoCallback = callback;
     self.videoTarget = target;
   });
 }
 - (void)captureOutput:(AVCaptureFileOutput *)captureOutput

@@ -359,172 +378,172 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
       fromConnections:(NSArray *)connections
                 error:(NSError *)error
 {
   BOOL recordSuccess = YES;
   if ([error code] != noErr) {
     // A problem occurred: Find out if the recording was successful.
     id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
     if (value) {
       recordSuccess = [value boolValue];
     }
   }
   if (!recordSuccess) {
     self.videoCallback(@[RCTMakeError(@"Error while recording", nil, nil)]);
     return;
   }

   if (self.videoTarget == RCTCameraCaptureTargetCameraRoll) {
     ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
     if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL]) {
       [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
                                   completionBlock:^(NSURL *assetURL, NSError *error) {
         if (error) {
           self.videoCallback(@[RCTMakeError(error.description, nil, nil)]);
           return;
         }

         self.videoCallback(@[[NSNull null], [assetURL absoluteString]]);
       }];
     }
   }
   else if (self.videoTarget == RCTCameraCaptureTargetDisk) {
     NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
     NSString *documentsDirectory = [paths firstObject];
     NSString *fullPath = [[documentsDirectory stringByAppendingPathComponent:[[NSUUID UUID] UUIDString]] stringByAppendingPathExtension:@"mov"];

     NSFileManager * fileManager = [NSFileManager defaultManager];
     NSError * error = nil;

     //copying destination
     if (!([fileManager copyItemAtPath:[outputFileURL path] toPath:fullPath error:&error])) {
       self.videoCallback(@[RCTMakeError(error.description, nil, nil)]);
       return;
     }
     self.videoCallback(@[[NSNull null], fullPath]);
   }
   else {
     self.videoCallback(@[RCTMakeError(@"Target not supported", nil, nil)]);
   }
 }
 - (NSString *)saveImage:(UIImage *)image withName:(NSString *)name {
   NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
   NSString *documentsDirectory = [paths firstObject];

   NSData *data = UIImageJPEGRepresentation(image, 1.0);
   NSFileManager *fileManager = [NSFileManager defaultManager];
   NSString *fullPath = [[documentsDirectory stringByAppendingPathComponent:name] stringByAppendingPathExtension:@"jpg"];

   [fileManager createFileAtPath:fullPath contents:data attributes:nil];
   return fullPath;
 }
 - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {

   NSArray *barcodeTypes = @[
     AVMetadataObjectTypeUPCECode,
     AVMetadataObjectTypeCode39Code,
     AVMetadataObjectTypeCode39Mod43Code,
     AVMetadataObjectTypeEAN13Code,
     AVMetadataObjectTypeEAN8Code,
     AVMetadataObjectTypeCode93Code,
     AVMetadataObjectTypeCode128Code,
     AVMetadataObjectTypePDF417Code,
     AVMetadataObjectTypeQRCode,
     AVMetadataObjectTypeAztecCode
   ];

   for (AVMetadataMachineReadableCodeObject *metadata in metadataObjects) {
     for (id barcodeType in barcodeTypes) {
       if (metadata.type == barcodeType) {

         [self.bridge.eventDispatcher sendDeviceEventWithName:@"CameraBarCodeRead"
                                                         body:@{
                                                           @"type": metadata.type,
                                                           @"data": metadata.stringValue,
                                                           @"bounds": @{
                                                             @"origin": @{
                                                               @"x": [NSString stringWithFormat:@"%f", metadata.bounds.origin.x],
                                                               @"y": [NSString stringWithFormat:@"%f", metadata.bounds.origin.y]
                                                             },
                                                             @"size": @{
                                                               @"height": [NSString stringWithFormat:@"%f", metadata.bounds.size.height],
                                                               @"width": [NSString stringWithFormat:@"%f", metadata.bounds.size.width],
                                                             }
                                                           }
                                                         }];
       }
     }
   }
 }
 - (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
 {
   NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
   AVCaptureDevice *captureDevice = [devices firstObject];

   for (AVCaptureDevice *device in devices)
   {
     if ([device position] == position)
     {
       captureDevice = device;
       break;
     }
   }

   return captureDevice;
 }
 - (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device
 {
   if (device.hasFlash && [device isFlashModeSupported:flashMode])
   {
     NSError *error = nil;
     if ([device lockForConfiguration:&error])
     {
       [device setFlashMode:flashMode];
       [device unlockForConfiguration];
     }
     else
     {
       NSLog(@"%@", error);
     }
   }
 }
 - (void)subjectAreaDidChange:(NSNotification *)notification
 {
   CGPoint devicePoint = CGPointMake(.5, .5);
   [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
 }
 - (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
 {
   dispatch_async([self sessionQueue], ^{
-    AVCaptureDevice *device = [[self captureDeviceInput] device];
+    AVCaptureDevice *device = [[self videoCaptureDeviceInput] device];
     NSError *error = nil;
     if ([device lockForConfiguration:&error])
     {
       if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode])
       {
         [device setFocusMode:focusMode];
         [device setFocusPointOfInterest:point];
       }
       if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode])
       {
         [device setExposureMode:exposureMode];
         [device setExposurePointOfInterest:point];
       }
       [device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
       [device unlockForConfiguration];
     }
     else
     {
       NSLog(@"%@", error);
     }
   });
 }
16
README.md
@@ -112,6 +112,12 @@ Values: `Camera.constants.Aspect.fit` or `"fit"`, `Camera.constants.Aspect.fill`

 The `aspect` property allows you to define how your viewfinder renders the camera's view. For instance, if you have a square viewfinder and you want to fill it entirely, you have two options: `"fill"`, where the aspect ratio of the camera's view is preserved by cropping the view, or `"stretch"`, where the aspect ratio is skewed in order to fit the entire image inside the viewfinder. The other option is `"fit"`, which ensures the camera's entire view fits inside your viewfinder without altering the aspect ratio.

+#### `captureAudio`
+
+Values: `true` (default), `false` (Boolean)
+
+*Applies to video capture mode only.* Specifies whether or not audio should be captured with the video.
+
+
 #### `captureMode`
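To illustrate the two properties documented in the hunk above (a sketch, not part of the commit; the sizing style is assumed): a square viewfinder that crops to fill its frame and records without sound could be declared like this.

```js
<Camera
  style={{ width: 300, height: 300 }}
  aspect={Camera.constants.Aspect.fill}
  captureMode={Camera.constants.CaptureMode.video}
  captureAudio={false}
/>
```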
@@ -173,7 +179,13 @@ You can access component methods by adding a `ref` (ie. `ref="camera"`) prop to

 #### `capture([options,] callback)`

-Captures data from the camera. What is captured is based on the `captureMode` and `captureTarget` props. `captureMode` tells the camera whether you want a still image or -- in the future, this is not currently supported -- video. `captureTarget` allows you to specify how you want the data to be captured and sent back to you. The available `target`s are `Camera.constants.CaptureTarget.memory` and `Camera.constants.CaptureTarget.disk` - the latter has been shown to dramatically improve camera performance.
+Captures data from the camera. What is captured is based on the `captureMode` and `captureTarget` props. `captureMode` tells the camera whether you want a still image or video. `captureTarget` allows you to specify how you want the data to be captured and sent back to you. The available `target`s are `Camera.constants.CaptureTarget.memory` and `Camera.constants.CaptureTarget.disk` - the latter has been shown to dramatically improve camera performance.
+
+Supported options:
+
+ - `audio` (See `captureAudio` under Properties)
+ - `mode` (See `captureMode` under Properties)
+ - `target` (See `captureTarget` under Properties)

 ## Subviews
 This component supports subviews, so if you wish to use the camera view as a background or if you want to layout buttons/images/etc. inside the camera then you can do that.
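An illustrative use of the options documented above (again a sketch rather than part of the commit; the `ref` name is arbitrary, and per-call options override the component's props):

```js
// Start recording a silent video to disk; the callback fires when the
// recording finishes and receives the saved file path.
this.refs.camera.capture({
  audio: false,
  mode: Camera.constants.CaptureMode.video,
  target: Camera.constants.CaptureTarget.disk
}, function(err, path) {
  if (err) {
    console.error(err);
    return;
  }
  console.log('Video saved to', path);
});

// Later, end the recording. stopCapture() is a no-op if nothing is recording.
this.refs.camera.stopCapture();
```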
@@ -181,7 +193,7 @@ This component supports subviews, so if you wish to use the camera view as a bac
 ## Todo
 These are some features I think would be important/beneficial to have included with this module. Pull requests welcome!

-- [ ] Video support
+- [x] Video support
 - [x] Flash mode setting
 - [x] Automatic orientation adjustment
 - [ ] Tap to focus