Merge branch 'feature/video'

commit 2b2dd33b2f
@@ -74,7 +74,8 @@ var Camera = React.createClass({
   getInitialState() {
     return {
-      isAuthorized: false
+      isAuthorized: false,
+      isRecording: false
     };
   },
 
 
@@ -88,6 +89,10 @@ var Camera = React.createClass({
   componentWillUnmount() {
     this.cameraBarCodeReadListener.remove();
+
+    if (this.state.isRecording) {
+      this.stopCapture();
+    }
   },
 
   render() {
 
@@ -183,12 +188,23 @@ var Camera = React.createClass({
     if (typeof options.mode === 'string') {
       options.mode = constants.CaptureMode[options.mode];
     }
 
+    if (options.mode === constants.CaptureMode.video) {
+      options.totalSeconds = (options.totalSeconds > -1 ? options.totalSeconds : -1);
+      options.preferredTimeScale = options.preferredTimeScale || 30;
+      this.setState({ isRecording: true });
+    }
+
     if (typeof options.target === 'string') {
       options.target = constants.CaptureTarget[options.target];
     }
 
     NativeModules.CameraManager.capture(options, cb);
   },
+
+  stopCapture() {
+    this.setState({ isRecording: false });
+    NativeModules.CameraManager.stopCapture();
+  }
 
 });
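A minimal usage sketch of the JS API added above, not part of this diff: it assumes the component is rendered as <Camera ref="cam" /> and that 'video' and 'cameraRoll' are valid keys of constants.CaptureMode and constants.CaptureTarget; only the option names and stopCapture() come from the hunks above.

    // Illustrative only -- start a capped video recording, then stop it early.
    this.refs.cam.capture({
      mode: 'video',           // looked up in constants.CaptureMode
      target: 'cameraRoll',    // looked up in constants.CaptureTarget (assumed key)
      totalSeconds: 10,        // optional cap; omit or pass -1 for no cap
      preferredTimeScale: 30   // defaults to 30 when omitted
    }, (err, data) => {
      if (err) { return console.error(err); }
      console.log('video saved at', data);
    });

    // ...later, to end the recording before the cap is reached:
    this.refs.cam.stopCapture();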
@@ -45,16 +45,19 @@ typedef NS_ENUM(NSInteger, RCTCameraTorchMode) {
   RCTCameraTorchModeAuto = AVCaptureTorchModeAuto
 };
 
-@interface RCTCameraManager : RCTViewManager<AVCaptureMetadataOutputObjectsDelegate>
+@interface RCTCameraManager : RCTViewManager<AVCaptureMetadataOutputObjectsDelegate, AVCaptureFileOutputRecordingDelegate>
 
 @property (nonatomic) dispatch_queue_t sessionQueue;
 @property (nonatomic) AVCaptureSession *session;
 @property (nonatomic) AVCaptureDeviceInput *captureDeviceInput;
 @property (nonatomic) AVCaptureStillImageOutput *stillImageOutput;
+@property (nonatomic) AVCaptureMovieFileOutput *movieFileOutput;
 @property (nonatomic) AVCaptureMetadataOutput *metadataOutput;
 @property (nonatomic) id runtimeErrorHandlingObserver;
 @property (nonatomic) NSInteger presetCamera;
 @property (nonatomic) AVCaptureVideoPreviewLayer *previewLayer;
+@property (nonatomic) NSInteger videoTarget;
+@property (nonatomic, strong) RCTResponseSenderBlock videoCallback;
 
 - (void)changeAspect:(NSString *)aspect;
 - (void)changeCamera:(NSInteger)camera;
@@ -63,5 +66,6 @@ typedef NS_ENUM(NSInteger, RCTCameraTorchMode) {
 - (void)changeTorchMode:(NSInteger)torchMode;
 - (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position;
 - (void)capture:(NSDictionary*)options callback:(RCTResponseSenderBlock)callback;
+- (void)stopCapture;
 
 @end
@@ -111,6 +111,21 @@ RCT_EXPORT_VIEW_PROPERTY(torchMode, NSInteger);
       self.captureDeviceInput = captureDeviceInput;
     }
   }
 
+  AVCaptureDevice *audioCaptureDevice = [self deviceWithMediaType:AVMediaTypeAudio preferringPosition:self.presetCamera];
+  if (audioCaptureDevice != nil) {
+    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
+
+    if (error)
+    {
+      NSLog(@"%@", error);
+    }
+
+    if ([self.session canAddInput:audioInput])
+    {
+      [self.session addInput:audioInput];
+    }
+  }
+
   AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
   if ([self.session canAddOutput:stillImageOutput])
@@ -119,6 +134,13 @@ RCT_EXPORT_VIEW_PROPERTY(torchMode, NSInteger);
     [self.session addOutput:stillImageOutput];
     self.stillImageOutput = stillImageOutput;
   }
 
+  AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
+  if ([self.session canAddOutput:movieFileOutput])
+  {
+    [self.session addOutput:movieFileOutput];
+    self.movieFileOutput = movieFileOutput;
+  }
+
   AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
   if ([self.session canAddOutput:metadataOutput]) {
@@ -228,7 +250,25 @@ RCT_EXPORT_METHOD(capture:(NSDictionary *)options callback:(RCTResponseSenderBlo
     [self captureStill:captureTarget callback:callback];
   }
   else if (captureMode == RCTCameraCaptureModeVideo) {
-    // waiting for incoming PRs
+    if (self.movieFileOutput.recording) {
+      callback(@[RCTMakeError(@"Already Recording", nil, nil)]);
+      return;
+    }
+
+    Float64 totalSeconds = [[options valueForKey:@"totalSeconds"] floatValue];
+    if (totalSeconds > -1) {
+      int32_t preferredTimeScale = [[options valueForKey:@"preferredTimeScale"] intValue];
+      CMTime maxDuration = CMTimeMakeWithSeconds(totalSeconds, preferredTimeScale);
+      self.movieFileOutput.maxRecordedDuration = maxDuration;
+    }
+
+    [self captureVideo:captureTarget callback:callback];
   }
 }
 
+RCT_EXPORT_METHOD(stopCapture) {
+  if (self.movieFileOutput.recording) {
+    [self.movieFileOutput stopRecording];
+  }
+}
+
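A sketch, not from this commit, of how the totalSeconds / preferredTimeScale options above translate into the maxRecordedDuration cap set via CMTimeMakeWithSeconds; the helper name is hypothetical.

    // Hypothetical helper mirroring the native logic above:
    // CMTimeMakeWithSeconds(seconds, timescale) stores seconds as the rational
    // (seconds * timescale) / timescale.
    function recordingCap(totalSeconds, preferredTimeScale) {
      preferredTimeScale = preferredTimeScale || 30;
      if (!(totalSeconds > -1)) {
        return null;  // no cap is set; recording runs until stopCapture()
      }
      return {
        value: Math.round(totalSeconds * preferredTimeScale),
        timescale: preferredTimeScale
      };
    }

    recordingCap(10);   // { value: 300, timescale: 30 } -> clip capped at 10 s
    recordingCap(-1);   // null -> no cap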
@@ -287,6 +327,82 @@ RCT_EXPORT_METHOD(capture:(NSDictionary *)options callback:(RCTResponseSenderBlo
   callback(@[[NSNull null], responseString]);
 }
 
+-(void)captureVideo:(NSInteger)target callback:(RCTResponseSenderBlock)callback {
+
+  [[self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:self.previewLayer.connection.videoOrientation];
+
+  //Create temporary URL to record to
+  NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
+  NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
+  NSFileManager *fileManager = [NSFileManager defaultManager];
+  if ([fileManager fileExistsAtPath:outputPath]) {
+    NSError *error;
+    if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
+      callback(@[RCTMakeError(error.description, nil, nil)]);
+      return;
+    }
+  }
+
+  //Start recording
+  [self.movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
+
+  self.videoCallback = callback;
+  self.videoTarget = target;
+}
+
+- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
+didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
+      fromConnections:(NSArray *)connections
+                error:(NSError *)error
+{
+
+  BOOL recordSuccess = YES;
+  if ([error code] != noErr) {
+    // A problem occurred: Find out if the recording was successful.
+    id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
+    if (value) {
+      recordSuccess = [value boolValue];
+    }
+  }
+  if (!recordSuccess) {
+    self.videoCallback(@[RCTMakeError(@"Error while recording", nil, nil)]);
+    return;
+  }
+
+  if (self.videoTarget == RCTCameraCaptureTargetCameraRoll) {
+    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
+    if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL]) {
+      [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
+                                  completionBlock:^(NSURL *assetURL, NSError *error) {
+        if (error) {
+          self.videoCallback(@[RCTMakeError(error.description, nil, nil)]);
+          return;
+        }
+
+        self.videoCallback(@[[NSNull null], [assetURL absoluteString]]);
+      }];
+    }
+  }
+  else if (self.videoTarget == RCTCameraCaptureTargetDisk) {
+    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
+    NSString *documentsDirectory = [paths firstObject];
+    NSString *fullPath = [[documentsDirectory stringByAppendingPathComponent:[[NSUUID UUID] UUIDString]] stringByAppendingPathExtension:@"mov"];
+
+    NSFileManager * fileManager = [NSFileManager defaultManager];
+    NSError * error = nil;
+
+    //copying destination
+    if (!([fileManager copyItemAtPath:[outputFileURL path] toPath:fullPath error:&error])) {
+      self.videoCallback(@[RCTMakeError(error.description, nil, nil)]);
+      return;
+    }
+    self.videoCallback(@[[NSNull null], fullPath]);
+  }
+  else {
+    self.videoCallback(@[RCTMakeError(@"Target not supported", nil, nil)]);
+  }
+}
+
 - (NSString *)saveImage:(UIImage *)image withName:(NSString *)name {
   NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
   NSString *documentsDirectory = [paths firstObject];
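Based on the delegate code above, a sketch (not part of this diff) of handling the capture() callback for video; the result shapes are inferred from the code, and the handler name is hypothetical.

    // Illustrative only -- what the JS callback receives per capture target.
    function onVideoCaptured(err, data) {
      if (err) {
        // "Already Recording", "Error while recording", "Target not supported",
        // or a file-manager error description from the native side
        console.error(err);
        return;
      }
      if (data.indexOf('assets-library://') === 0) {
        // cameraRoll target: the ALAssetsLibrary asset URL the video was written to
        console.log('saved to camera roll:', data);
      } else {
        // disk target: absolute path of a <UUID>.mov copy in the app's Documents directory
        console.log('saved to disk at:', data);
      }
    }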