I believe this will fix issues #52 and #58. There are a lot of changes here due to a whitespace formatting change.

Lochlan Wansbrough 2015-07-09 20:11:50 -07:00
parent 87a8389c08
commit 65b5033596
5 changed files with 501 additions and 461 deletions

View File

@@ -27,6 +27,7 @@ var Camera = React.createClass({
       PropTypes.string,
       PropTypes.number
     ]),
+    captureAudio: PropTypes.bool,
     captureMode: PropTypes.oneOfType([
       PropTypes.string,
       PropTypes.number
@@ -65,6 +66,7 @@ var Camera = React.createClass({
       aspect: constants.Aspect.fill,
       type: constants.Type.back,
       orientation: constants.Orientation.auto,
+      captureAudio: true,
       captureMode: constants.CaptureMode.still,
       captureTarget: constants.CaptureTarget.memory,
       flashMode: constants.FlashMode.off,
@@ -185,6 +187,7 @@ var Camera = React.createClass({
     }
     options = Object.assign({}, {
+      audio: this.props.captureAudio,
       mode: this.props.captureMode,
       target: this.props.captureTarget
     }, options);
@@ -207,8 +210,10 @@ var Camera = React.createClass({
   },
   stopCapture() {
-    this.setState({ isRecording: false });
+    if (this.state.isRecording) {
       NativeModules.CameraManager.stopCapture();
+      this.setState({ isRecording: false });
+    }
   }
 });
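A usage sketch of the guard above, assuming a `<Camera ref="camera">` element whose video-mode `capture()` call sets `isRecording`:

```js
// Start a video recording; the callback fires when the recording ends.
this.refs.camera.capture({ mode: Camera.constants.CaptureMode.video }, function(err, data) {
  if (err) return console.error(err);
  console.log(data);
});

// Safe to call even when nothing is recording, thanks to the isRecording guard.
this.refs.camera.stopCapture();
```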

View File

@@ -34,6 +34,7 @@
   else {
     self.manager.presetCamera = type;
   }
+  [self.manager initializeCaptureSessionInput:AVMediaTypeVideo];
 }
 
 - (void)setOrientation:(NSInteger)orientation
@@ -63,6 +64,7 @@
   if ((self = [super init])) {
     self.manager = manager;
+    [self.manager initializeCaptureSessionInput:AVMediaTypeVideo];
   }
   return self;
 }

View File

@@ -49,7 +49,8 @@ typedef NS_ENUM(NSInteger, RCTCameraTorchMode) {
 @property (nonatomic) dispatch_queue_t sessionQueue;
 @property (nonatomic) AVCaptureSession *session;
-@property (nonatomic) AVCaptureDeviceInput *captureDeviceInput;
+@property (nonatomic) AVCaptureDeviceInput *audioCaptureDeviceInput;
+@property (nonatomic) AVCaptureDeviceInput *videoCaptureDeviceInput;
 @property (nonatomic) AVCaptureStillImageOutput *stillImageOutput;
 @property (nonatomic) AVCaptureMovieFileOutput *movieFileOutput;
 @property (nonatomic) AVCaptureMetadataOutput *metadataOutput;
@@ -66,6 +67,7 @@ typedef NS_ENUM(NSInteger, RCTCameraTorchMode) {
 - (void)changeTorchMode:(NSInteger)torchMode;
 - (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position;
 - (void)capture:(NSDictionary*)options callback:(RCTResponseSenderBlock)callback;
+- (void)initializeCaptureSessionInput:(NSString*)type;
 - (void)stopCapture;
 @end

View File

@@ -90,43 +90,12 @@ RCT_EXPORT_VIEW_PROPERTY(torchMode, NSInteger);
   self.sessionQueue = dispatch_queue_create("cameraManagerQueue", DISPATCH_QUEUE_SERIAL);
 
   dispatch_async(self.sessionQueue, ^{
-    NSError *error = nil;
-    if (self.presetCamera == AVCaptureDevicePositionUnspecified) {
-      self.presetCamera = AVCaptureDevicePositionBack;
-    }
-    AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:self.presetCamera];
-    if (captureDevice != nil) {
-      AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
-      if (error)
-      {
-        NSLog(@"%@", error);
-      }
-      if ([self.session canAddInput:captureDeviceInput])
-      {
-        [self.session addInput:captureDeviceInput];
-        self.captureDeviceInput = captureDeviceInput;
-      }
-    }
-    AVCaptureDevice *audioCaptureDevice = [self deviceWithMediaType:AVMediaTypeAudio preferringPosition:self.presetCamera];
-    if (audioCaptureDevice != nil) {
-      AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
-      if (error)
-      {
-        NSLog(@"%@", error);
-      }
-      if ([self.session canAddInput:audioInput])
-      {
-        [self.session addInput:audioInput];
-      }
-    }
     AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
     if ([self.session canAddOutput:stillImageOutput])
     {
@@ -175,7 +144,8 @@ RCT_EXPORT_METHOD(checkDeviceAuthorizationStatus:(RCTResponseSenderBlock) callback) {
 }
 
 RCT_EXPORT_METHOD(changeCamera:(NSInteger)camera) {
-  AVCaptureDevice *currentCaptureDevice = [self.captureDeviceInput device];
+  dispatch_async(self.sessionQueue, ^{
+    AVCaptureDevice *currentCaptureDevice = [self.videoCaptureDeviceInput device];
     AVCaptureDevicePosition position = (AVCaptureDevicePosition)camera;
     AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:(AVCaptureDevicePosition)position];
@@ -183,10 +153,12 @@ RCT_EXPORT_METHOD(changeCamera:(NSInteger)camera) {
       return;
     }
+    self.presetCamera = camera;
 
     NSError *error = nil;
     AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
-    if (error)
+    if (error || captureDeviceInput == nil)
     {
       NSLog(@"%@", error);
       return;
@@ -194,22 +166,24 @@ RCT_EXPORT_METHOD(changeCamera:(NSInteger)camera) {
     [self.session beginConfiguration];
-    [self.session removeInput:self.captureDeviceInput];
+    [self.session removeInput:self.videoCaptureDeviceInput];
     if ([self.session canAddInput:captureDeviceInput])
     {
-      [self.session addInput:captureDeviceInput];
       [NSNotificationCenter.defaultCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentCaptureDevice];
       [NSNotificationCenter.defaultCenter addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
+      [self.session addInput:captureDeviceInput];
-      self.captureDeviceInput = captureDeviceInput;
+      self.videoCaptureDeviceInput = captureDeviceInput;
     }
     else
     {
-      [self.session addInput:self.captureDeviceInput];
+      [self.session addInput:self.videoCaptureDeviceInput];
     }
     [self.session commitConfiguration];
+  });
 }
 
 RCT_EXPORT_METHOD(changeAspect:(NSString *)aspect) {
@@ -217,7 +191,7 @@ RCT_EXPORT_METHOD(changeAspect:(NSString *)aspect) {
 }
 
 RCT_EXPORT_METHOD(changeFlashMode:(NSInteger)flashMode) {
-  AVCaptureDevice *device = [self.captureDeviceInput device];
+  AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
   NSError *error = nil;
 
   if (![device hasFlash]) return;
@@ -234,7 +208,7 @@ RCT_EXPORT_METHOD(changeOrientation:(NSInteger)orientation) {
 }
 
 RCT_EXPORT_METHOD(changeTorchMode:(NSInteger)torchMode) {
-  AVCaptureDevice *device = [self.captureDeviceInput device];
+  AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
   NSError *error = nil;
 
   if (![device hasTorch]) return;
@@ -251,22 +225,10 @@ RCT_EXPORT_METHOD(capture:(NSDictionary *)options callback:(RCTResponseSenderBlock)callback) {
   NSInteger captureTarget = [[options valueForKey:@"target"] intValue];
 
   if (captureMode == RCTCameraCaptureModeStill) {
-    [self captureStill:captureTarget callback:callback];
+    [self captureStill:captureTarget options:options callback:callback];
   }
   else if (captureMode == RCTCameraCaptureModeVideo) {
-    if (self.movieFileOutput.recording) {
-      callback(@[RCTMakeError(@"Already Recording", nil, nil)]);
-      return;
-    }
-
-    Float64 totalSeconds = [[options valueForKey:@"totalSeconds"] floatValue];
-    if (totalSeconds > -1) {
-      int32_t preferredTimeScale = [[options valueForKey:@"preferredTimeScale"] intValue];
-      CMTime maxDuration = CMTimeMakeWithSeconds(totalSeconds, preferredTimeScale);
-      self.movieFileOutput.maxRecordedDuration = maxDuration;
-    }
-
-    [self captureVideo:captureTarget callback:callback];
+    [self captureVideo:captureTarget options:options callback:callback];
   }
 }
@@ -276,7 +238,46 @@ RCT_EXPORT_METHOD(stopCapture) {
   }
 }
 
-- (void)captureStill:(NSInteger)target callback:(RCTResponseSenderBlock)callback {
+- (void)initializeCaptureSessionInput:(NSString *)type {
+  dispatch_async(self.sessionQueue, ^{
+    NSError *error = nil;
+    AVCaptureDevice *captureDevice = [self deviceWithMediaType:type preferringPosition:self.presetCamera];
+    if (captureDevice == nil) {
+      return;
+    }
+    AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
+    if (error || captureDeviceInput == nil) {
+      NSLog(@"%@", error);
+      return;
+    }
+    [self.session beginConfiguration];
+    if (type == AVMediaTypeAudio) {
+      [self.session removeInput:self.audioCaptureDeviceInput];
+    }
+    else if (type == AVMediaTypeVideo) {
+      [self.session removeInput:self.videoCaptureDeviceInput];
+    }
+    if ([self.session canAddInput:captureDeviceInput]) {
+      [self.session addInput:captureDeviceInput];
+      if (type == AVMediaTypeAudio) {
+        self.audioCaptureDeviceInput = captureDeviceInput;
+      }
+      else if (type == AVMediaTypeVideo) {
+        self.videoCaptureDeviceInput = captureDeviceInput;
+      }
+    }
+    [self.session commitConfiguration];
+  });
+}
+
+- (void)captureStill:(NSInteger)target options:(NSDictionary *)options callback:(RCTResponseSenderBlock)callback {
   if ([[[UIDevice currentDevice].model lowercaseString] rangeOfString:@"simulator"].location != NSNotFound){
     CGSize size = CGSizeMake(720, 1280);
@@ -331,8 +332,25 @@ RCT_EXPORT_METHOD(stopCapture) {
   callback(@[[NSNull null], responseString]);
 }
 
--(void)captureVideo:(NSInteger)target callback:(RCTResponseSenderBlock)callback {
+-(void)captureVideo:(NSInteger)target options:(NSDictionary *)options callback:(RCTResponseSenderBlock)callback {
+  if (self.movieFileOutput.recording) {
+    callback(@[RCTMakeError(@"Already Recording", nil, nil)]);
+    return;
+  }
+
+  if ([[options valueForKey:@"audio"] boolValue]) {
+    [self initializeCaptureSessionInput:AVMediaTypeAudio];
+  }
+
+  Float64 totalSeconds = [[options valueForKey:@"totalSeconds"] floatValue];
+  if (totalSeconds > -1) {
+    int32_t preferredTimeScale = [[options valueForKey:@"preferredTimeScale"] intValue];
+    CMTime maxDuration = CMTimeMakeWithSeconds(totalSeconds, preferredTimeScale);
+    self.movieFileOutput.maxRecordedDuration = maxDuration;
+  }
+
+  dispatch_async(self.sessionQueue, ^{
     [[self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:self.previewLayer.connection.videoOrientation];
     //Create temporary URL to record to
@@ -352,6 +370,7 @@ RCT_EXPORT_METHOD(stopCapture) {
     self.videoCallback = callback;
     self.videoTarget = target;
+  });
 }
 
 - (void)captureOutput:(AVCaptureFileOutput *)captureOutput
@@ -503,7 +522,7 @@ didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
 - (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
 {
   dispatch_async([self sessionQueue], ^{
-    AVCaptureDevice *device = [[self captureDeviceInput] device];
+    AVCaptureDevice *device = [[self videoCaptureDeviceInput] device];
     NSError *error = nil;
     if ([device lockForConfiguration:&error])
     {

View File

@@ -112,6 +112,12 @@ Values: `Camera.constants.Aspect.fit` or `"fit"`, `Camera.constants.Aspect.fill`
 The `aspect` property allows you to define how your viewfinder renders the camera's view. For instance, if you have a square viewfinder and you want to fill it entirely, you have two options: `"fill"`, where the aspect ratio of the camera's view is preserved by cropping the view, or `"stretch"`, where the aspect ratio is skewed in order to fit the entire image inside the viewfinder. The other option is `"fit"`, which ensures the camera's entire view fits inside your viewfinder without altering the aspect ratio.
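As an illustration, a minimal JSX sketch of the three modes (the `styles.viewfinder` name is hypothetical):

```jsx
// "fill" crops to cover the viewfinder, "fit" letterboxes the whole frame,
// and "stretch" skews the image to cover it without cropping.
<Camera style={styles.viewfinder} aspect={Camera.constants.Aspect.fill} />
```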
+#### `captureAudio`
+
+Values: `true` (default), `false` (Boolean)
+
+*Applies to video capture mode only.* Specifies whether or not audio should be captured with the video.
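For example, a sketch of recording video without sound (prop names as added above):

```jsx
<Camera
  ref="camera"
  captureMode={Camera.constants.CaptureMode.video}
  captureAudio={false}
/>
```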
 #### `captureMode`
@@ -173,7 +179,13 @@ You can access component methods by adding a `ref` (ie. `ref="camera"`) prop to
 #### `capture([options,] callback)`
 
-Captures data from the camera. What is captured is based on the `captureMode` and `captureTarget` props. `captureMode` tells the camera whether you want a still image or -- in the future, this is not currently supported -- video. `captureTarget` allows you to specify how you want the data to be captured and sent back to you. The available `target`s are `Camera.constants.CaptureTarget.memory` and `Camera.constants.CaptureTarget.disk` - the latter has been shown to dramatically improve camera performance.
+Captures data from the camera. What is captured is based on the `captureMode` and `captureTarget` props. `captureMode` tells the camera whether you want a still image or video. `captureTarget` allows you to specify how you want the data to be captured and sent back to you. The available `target`s are `Camera.constants.CaptureTarget.memory` and `Camera.constants.CaptureTarget.disk` - the latter has been shown to dramatically improve camera performance.
+
+Supported options:
+
+- `audio` (See `captureAudio` under Properties)
+- `mode` (See `captureMode` under Properties)
+- `target` (See `captureTarget` under Properties)
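A usage sketch of a call that overrides these props per invocation; the `(err, data)` callback shape is assumed from the native module's `@[error, response]` convention shown above:

```js
this.refs.camera.capture({
  mode: Camera.constants.CaptureMode.video,
  audio: true,
  target: Camera.constants.CaptureTarget.disk
}, function(err, data) {
  if (err) return console.error(err);
  console.log(data); // a file path when target is disk
});
```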
 ## Subviews
 
 This component supports subviews, so if you wish to use the camera view as a background or if you want to lay out buttons/images/etc. inside the camera then you can do that.
@@ -181,7 +193,7 @@ This component supports subviews, so if you wish to use the camera view as a background or if you want to lay out buttons/images/etc. inside the camera then you can do that.
 ## Todo
 
 These are some features I think would be important/beneficial to have included with this module. Pull requests welcome!
 
-- [ ] Video support
+- [x] Video support
 - [x] Flash mode setting
 - [x] Automatic orientation adjustment
 - [ ] Tap to focus