diff --git a/README.md b/README.md
index 65dc6d2..d1a2a45 100644
--- a/README.md
+++ b/README.md
@@ -240,7 +240,7 @@ You can access component methods by adding a `ref` (ie. `ref="camera"`) prop to
 
 #### `capture([options]): Promise`
 
-Captures data from the camera. What is captured is based on the `captureMode` and `captureTarget` props. `captureMode` tells the camera whether you want a still image or video. `captureTarget` allows you to specify how you want the data to be captured and sent back to you. See `captureTarget` under Properties to see the available values. The promise will be fulfilled with the image data or file handle of the image on disk, depending on `target`.
+Captures data from the camera. What is captured is based on the `captureMode` and `captureTarget` props. `captureMode` tells the camera whether you want a still image or video. `captureTarget` allows you to specify how you want the data to be captured and sent back to you. See `captureTarget` under Properties to see the available values.
 
 Supported options:
 
@@ -251,6 +251,15 @@ Supported options:
 - `location` This is the object returned from `navigator.geolocation.getCurrentPosition()` (React Native's geolocation polyfill). It will add GPS metadata to the image.
 - `rotation` This will rotate the image by the number of degrees specified.
 
+The promise will be fulfilled with an object containing some of the following properties:
+
+ - `data`: the capture data as a base64-encoded string (only returned when `captureTarget` is the deprecated `Camera.constants.CaptureTarget.memory`)
+ - `path`: the path of the captured image or video file on disk
+ - `width`: (currently iOS video only) the video file's frame width
+ - `height`: (currently iOS video only) the video file's frame height
+ - `duration`: (currently iOS video only) the video file's duration, in seconds
+ - `size`: (currently iOS video only) the video file's size, in bytes
+
 #### `iOS` `getFOV(): Promise`
 
 Returns the camera's current field of view.
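With this change, callers read `path` (or, for the deprecated memory target, `data`) off the resolved object instead of receiving a bare string. A minimal consumer-side sketch in JavaScript, assuming a mounted `<Camera ref="camera" />` as described in the README; the method name `takePicture` is illustrative only:

```js
// Sketch: still-image capture with the object-shaped resolution described above.
takePicture() {
  this.refs.camera.capture()
    .then((data) => {
      // Disk and cameraRoll targets resolve with the file location on `data.path`;
      // the deprecated memory target resolves with base64 on `data.data` instead.
      console.log(data.path);
    })
    .catch((err) => console.error(err));
}
```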
diff --git a/android/src/main/java/com/lwansbrough/RCTCamera/RCTCameraModule.java b/android/src/main/java/com/lwansbrough/RCTCamera/RCTCameraModule.java
index 5331955..0dde182 100644
--- a/android/src/main/java/com/lwansbrough/RCTCamera/RCTCameraModule.java
+++ b/android/src/main/java/com/lwansbrough/RCTCamera/RCTCameraModule.java
@@ -181,10 +181,12 @@ public class RCTCameraModule extends ReactContextBaseJavaModule {
             public void onPictureTaken(byte[] data, Camera camera) {
                 camera.stopPreview();
                 camera.startPreview();
+                Map response = new HashMap();
                 switch (options.getInt("target")) {
                     case RCT_CAMERA_CAPTURE_TARGET_MEMORY:
                         String encoded = Base64.encodeToString(data, Base64.DEFAULT);
-                        promise.resolve(encoded);
+                        response.put("data", encoded);
+                        promise.resolve(response);
                         break;
                     case RCT_CAMERA_CAPTURE_TARGET_CAMERA_ROLL:
                         BitmapFactory.Options bitmapOptions = new BitmapFactory.Options();
@@ -193,7 +195,8 @@ public class RCTCameraModule extends ReactContextBaseJavaModule {
                             _reactContext.getContentResolver(), bitmap,
                             options.getString("title"),
                             options.getString("description"));
-                        promise.resolve(url);
+                        response.put("path", url);
+                        promise.resolve(response);
                         break;
                     case RCT_CAMERA_CAPTURE_TARGET_DISK:
                         File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
diff --git a/ios/RCTCameraManager.m b/ios/RCTCameraManager.m
index c14c553..80e0091 100644
--- a/ios/RCTCameraManager.m
+++ b/ios/RCTCameraManager.m
@@ -548,7 +548,8 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
   NSString *responseString;
 
   if (target == RCTCameraCaptureTargetMemory) {
-    responseString = [imageData base64EncodedStringWithOptions:0];
+    resolve(@{@"data":[imageData base64EncodedStringWithOptions:0]});
+    return;
   }
 
   else if (target == RCTCameraCaptureTargetDisk) {
@@ -573,7 +574,7 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
   else if (target == RCTCameraCaptureTargetCameraRoll) {
     [[[ALAssetsLibrary alloc] init] writeImageDataToSavedPhotosAlbum:imageData metadata:metadata completionBlock:^(NSURL* url, NSError* error) {
       if (error == nil) {
-        resolve([url absoluteString]);
+        resolve(@{@"path":[url absoluteString]});
       }
       else {
         reject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
@@ -581,7 +582,7 @@ RCT_EXPORT_METHOD(hasFlash:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRej
     }];
     return;
   }
-  resolve(responseString);
+  resolve(@{@"path":responseString});
 }
 
 - (CGImageRef)newCGImageRotatedByAngle:(CGImageRef)imgRef angle:(CGFloat)angle
@@ -666,7 +667,6 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
       fromConnections:(NSArray *)connections
                 error:(NSError *)error
 {
-  BOOL recordSuccess = YES;
   if ([error code] != noErr) {
     // A problem occurred: Find out if the recording was successful.
@@ -680,6 +680,31 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
     return;
   }
 
+  AVURLAsset* videoAsAsset = [AVURLAsset URLAssetWithURL:outputFileURL options:nil];
+  AVAssetTrack* videoTrack = [[videoAsAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
+  float videoWidth;
+  float videoHeight;
+
+  CGSize videoSize = [videoTrack naturalSize];
+  CGAffineTransform txf = [videoTrack preferredTransform];
+
+  if ((txf.tx == videoSize.width && txf.ty == videoSize.height) || (txf.tx == 0 && txf.ty == 0)) {
+    // Video recorded in landscape orientation
+    videoWidth = videoSize.width;
+    videoHeight = videoSize.height;
+  } else {
+    // Video recorded in portrait orientation, so have to swap reported width/height
+    videoWidth = videoSize.height;
+    videoHeight = videoSize.width;
+  }
+
+  NSMutableDictionary *videoInfo = [NSMutableDictionary dictionaryWithDictionary:@{
+     @"duration":[NSNumber numberWithFloat:CMTimeGetSeconds(videoAsAsset.duration)],
+     @"width":[NSNumber numberWithFloat:videoWidth],
+     @"height":[NSNumber numberWithFloat:videoHeight],
+     @"size":[NSNumber numberWithLongLong:captureOutput.recordedFileSize],
+  }];
+
   if (self.videoTarget == RCTCameraCaptureTargetCameraRoll) {
     ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
     if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL]) {
@@ -689,7 +714,8 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
           self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
           return;
         }
-        self.videoResolve([assetURL absoluteString]);
+        [videoInfo setObject:[assetURL absoluteString] forKey:@"path"];
+        self.videoResolve(videoInfo);
       }];
     }
   }
@@ -706,7 +732,8 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
       self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
       return;
     }
-    self.videoResolve(fullPath);
+    [videoInfo setObject:fullPath forKey:@"path"];
+    self.videoResolve(videoInfo);
   }
   else if (self.videoTarget == RCTCameraCaptureTargetTemp) {
     NSString *fileName = [[NSProcessInfo processInfo] globallyUniqueString];
@@ -720,7 +747,8 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(error.description));
       return;
     }
-    self.videoResolve(fullPath);
+    [videoInfo setObject:fullPath forKey:@"path"];
+    self.videoResolve(videoInfo);
   }
   else {
     self.videoReject(RCTErrorUnspecified, nil, RCTErrorWithMessage(@"Target not supported"));
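On the JavaScript side, the iOS video path now resolves with orientation-corrected frame dimensions, the duration in seconds, and the recorded byte size alongside the file path. A hedged consumer-side sketch; it assumes `captureMode` is set to `Camera.constants.CaptureMode.video` on the mounted `<Camera ref="camera" />` and that the recording is stopped elsewhere (e.g. via `stopCapture()`, if your version exposes it):

```js
// Sketch: reading the video metadata now attached to the resolved object (iOS).
this.refs.camera.capture()
  .then((data) => {
    console.log(data.path);     // file path or asset URL of the recording
    console.log(data.width);    // frame width, swapped for portrait recordings
    console.log(data.height);   // frame height
    console.log(data.duration); // seconds
    console.log(data.size);     // bytes
  })
  .catch((err) => console.error(err));
```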