Mirror of https://github.com/status-im/react-native-camera.git (synced 2025-02-24 01:38:18 +00:00)
Merge pull request #151 from christopherdro/capture-temp

Add option to store captures in temp directory

Commit: 114476de03
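Usage sketch (not part of the commit): the `@"temp"` constant exported below makes the new target reachable from JavaScript as `Camera.constants.CaptureTarget.temp`. The component and the callback-style `capture()` call in the sketch are assumptions based on how react-native-camera was typically used at the time; only the `temp` constant itself comes from this change.

```js
// Hypothetical sketch -- only Camera.constants.CaptureTarget.temp is added by
// this commit; the surrounding component and the capture() callback are assumed.
'use strict';
var React = require('react-native');
var Camera = require('react-native-camera');
var { Text } = React;

var TempCaptureExample = React.createClass({
  _takePicture: function() {
    // With the temp target, the native side writes the JPEG into
    // NSTemporaryDirectory() and passes back the absolute file path.
    this.refs.cam.capture(function(err, path) {
      console.log(err, path);
    });
  },

  render: function() {
    return (
      <Camera
        ref="cam"
        style={{ flex: 1 }}
        captureTarget={Camera.constants.CaptureTarget.temp}>
        <Text onPress={this._takePicture}>Take picture</Text>
      </Camera>
    );
  }
});

module.exports = TempCaptureExample;
```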
@@ -17,7 +17,8 @@ typedef NS_ENUM(NSInteger, RCTCameraCaptureMode) {
 typedef NS_ENUM(NSInteger, RCTCameraCaptureTarget) {
   RCTCameraCaptureTargetMemory = 0,
   RCTCameraCaptureTargetDisk = 1,
-  RCTCameraCaptureTargetCameraRoll = 2
+  RCTCameraCaptureTargetTemp = 2,
+  RCTCameraCaptureTargetCameraRoll = 3
 };
 
 typedef NS_ENUM(NSInteger, RCTCameraOrientation) {
@@ -65,6 +65,7 @@ RCT_EXPORT_VIEW_PROPERTY(torchMode, NSInteger);
       @"CaptureTarget": @{
         @"memory": @(RCTCameraCaptureTargetMemory),
         @"disk": @(RCTCameraCaptureTargetDisk),
+        @"temp": @(RCTCameraCaptureTargetTemp),
         @"cameraRoll": @(RCTCameraCaptureTargetCameraRoll)
       },
       @"Orientation": @{
@@ -250,12 +251,12 @@ RCT_EXPORT_METHOD(stopCapture) {
 #if TARGET_IPHONE_SIMULATOR
   return;
 #endif
 
   dispatch_async(self.sessionQueue, ^{
     if (self.presetCamera == AVCaptureDevicePositionUnspecified) {
       self.presetCamera = AVCaptureDevicePositionBack;
     }
 
     AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
     if ([self.session canAddOutput:stillImageOutput])
     {
@@ -263,14 +264,14 @@ RCT_EXPORT_METHOD(stopCapture) {
       [self.session addOutput:stillImageOutput];
       self.stillImageOutput = stillImageOutput;
     }
 
     AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
     if ([self.session canAddOutput:movieFileOutput])
     {
       [self.session addOutput:movieFileOutput];
       self.movieFileOutput = movieFileOutput;
     }
 
     AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
     if ([self.session canAddOutput:metadataOutput]) {
       [metadataOutput setMetadataObjectsDelegate:self queue:self.sessionQueue];
@@ -278,7 +279,7 @@ RCT_EXPORT_METHOD(stopCapture) {
       [metadataOutput setMetadataObjectTypes:metadataOutput.availableMetadataObjectTypes];
       self.metadataOutput = metadataOutput;
     }
 
     __weak RCTCameraManager *weakSelf = self;
     [self setRuntimeErrorHandlingObserver:[NSNotificationCenter.defaultCenter addObserverForName:AVCaptureSessionRuntimeErrorNotification object:self.session queue:nil usingBlock:^(NSNotification *note) {
       RCTCameraManager *strongSelf = weakSelf;
@@ -287,7 +288,7 @@ RCT_EXPORT_METHOD(stopCapture) {
         [strongSelf.session startRunning];
       });
     }]];
 
     [self.session startRunning];
   });
 }
@@ -296,14 +297,14 @@ RCT_EXPORT_METHOD(stopCapture) {
 #if TARGET_IPHONE_SIMULATOR
   return;
 #endif
 
   dispatch_async(self.sessionQueue, ^{
     [self.previewLayer removeFromSuperlayer];
     [self.session stopRunning];
     for(AVCaptureInput *input in self.session.inputs) {
       [self.session removeInput:input];
     }
 
     for(AVCaptureOutput *output in self.session.outputs) {
       [self.session removeOutput:output];
     }
@@ -312,19 +313,19 @@ RCT_EXPORT_METHOD(stopCapture) {
 
 - (void)initializeCaptureSessionInput:(NSString *)type {
   dispatch_async(self.sessionQueue, ^{
 
     [self.session beginConfiguration];
 
     NSError *error = nil;
     AVCaptureDevice *captureDevice;
 
     if (type == AVMediaTypeAudio) {
       captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
     }
     else if (type == AVMediaTypeVideo) {
       captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:self.presetCamera];
     }
 
     if (captureDevice == nil) {
       return;
     }
@@ -451,13 +452,21 @@ RCT_EXPORT_METHOD(stopCapture) {
   else if (target == RCTCameraCaptureTargetDisk) {
     NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
     NSString *documentsDirectory = [paths firstObject];
 
     NSFileManager *fileManager = [NSFileManager defaultManager];
     NSString *fullPath = [[documentsDirectory stringByAppendingPathComponent:[[NSUUID UUID] UUIDString]] stringByAppendingPathExtension:@"jpg"];
 
     [fileManager createFileAtPath:fullPath contents:imageData attributes:nil];
     responseString = fullPath;
   }
 
+  else if (target == RCTCameraCaptureTargetTemp) {
+    NSString *fileName = [[NSProcessInfo processInfo] globallyUniqueString];
+    NSString *fullPath = [NSString stringWithFormat:@"%@%@.jpg", NSTemporaryDirectory(), fileName];
+
+    [imageData writeToFile:fullPath atomically:YES];
+    responseString = fullPath;
+  }
+
   else if (target == RCTCameraCaptureTargetCameraRoll) {
     [[[ALAssetsLibrary alloc] init] writeImageDataToSavedPhotosAlbum:imageData metadata:metadata completionBlock:^(NSURL* url, NSError* error) {
@@ -591,6 +600,20 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
     }
     self.videoCallback(@[[NSNull null], fullPath]);
   }
+  else if (self.videoTarget == RCTCameraCaptureTargetTemp) {
+    NSString *fileName = [[NSProcessInfo processInfo] globallyUniqueString];
+    NSString *fullPath = [NSString stringWithFormat:@"%@%@.mov", NSTemporaryDirectory(), fileName];
+
+    NSFileManager * fileManager = [NSFileManager defaultManager];
+    NSError * error = nil;
+
+    //copying destination
+    if (!([fileManager copyItemAtPath:[outputFileURL path] toPath:fullPath error:&error])) {
+      self.videoCallback(@[RCTMakeError(error.description, nil, nil)]);
+      return;
+    }
+    self.videoCallback(@[[NSNull null], fullPath]);
+  }
   else {
     self.videoCallback(@[RCTMakeError(@"Target not supported", nil, nil)]);
   }
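The video branch above mirrors the still-image case: once recording stops, the `.mov` is copied from the recorder's output URL into `NSTemporaryDirectory()` and the callback receives that path. A rough JavaScript sketch of driving it, assuming `CaptureMode` is exported alongside `CaptureTarget` and that `capture()`/`stopCapture()` keep the callback-style API of the era:

```js
// Hypothetical sketch -- only the temp target comes from this commit; the
// CaptureMode constant and the capture()/stopCapture() call pattern are assumed.
var Camera = require('react-native-camera');

// Rendered elsewhere, e.g.:
//   <Camera ref="cam"
//           captureMode={Camera.constants.CaptureMode.video}
//           captureTarget={Camera.constants.CaptureTarget.temp} />

function startRecording(cam) {
  // The callback fires after stopRecording(); with the temp target the second
  // argument is the path of the .mov copied into NSTemporaryDirectory().
  cam.capture(function(err, path) {
    console.log(err, path);
  });
}

function stopRecording(cam) {
  cam.stopCapture();
}
```

Either way, files written to the temp directory can be purged by iOS when storage runs low, so a capture that has to outlive the session should be moved, uploaded, or saved to the camera roll soon after the callback returns; that trade-off is what separates the `temp` target from `disk` and `cameraRoll`.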