Merge branch 'master' into width_control_and_rotation_fix

commit 69efb06d42
Michaël Villeneuve, 2018-02-26 14:13:22 +01:00 (committed by GitHub)
6 changed files with 193 additions and 0 deletions


@@ -250,6 +250,8 @@ Supported options:
- `fixOrientation` (Android only, boolean): set this to `true` if you want to fix incorrect image orientation (can take up to 5 seconds on some devices). Do not provide this if you only need EXIF-based orientation.
- `forceUpOrientation` (iOS only, boolean): forces an up (portrait) orientation based on the actual image data instead of the EXIF data (see the usage sketch below).
The promise will be fulfilled with an object containing some of the following properties:
- `width`: the image's width (taking image orientation into account)
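A minimal JS-side sketch (not part of the diff) of how these options could be combined in a `takePictureAsync` call. The helper name and option values are illustrative, and note that `width`, `fixOrientation`, and `forceUpOrientation` are documented here but not yet declared in the `TakePictureOptions` typings added later in this commit, hence the cast:

```ts
import { RNCamera } from 'react-native-camera';

// `camera` is assumed to be a ref to a mounted <RNCamera /> instance.
async function takeUprightPhoto(camera: RNCamera) {
  const photo = await camera.takePictureAsync({
    quality: 0.8,
    width: 1280,              // scale the result to 1280 px wide
    fixOrientation: true,     // Android only: rotate the pixel data
    forceUpOrientation: true, // iOS only: redraw so imageOrientation is "up"
  } as any); // cast: these three options are not in TakePictureOptions yet
  console.log(photo.width, photo.height, photo.uri);
}
```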


@@ -324,6 +324,9 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
  if ([options[@"mirrorImage"] boolValue]) {
    takenImage = [RNImageUtils mirrorImage:takenImage];
  }
  if ([options[@"forceUpOrientation"] boolValue]) {
    takenImage = [RNImageUtils forceUpOrientation:takenImage];
  }
  if ([options[@"width"] integerValue]) {
    takenImage = [RNImageUtils scaleImage:takenImage toWidth:[options[@"width"] integerValue]];
@@ -340,6 +343,8 @@ static NSDictionary *defaultFaceDetectorOptions = nil;
  if ([options[@"base64"] boolValue]) {
    response[@"base64"] = [takenImageData base64EncodedStringWithOptions:0];
  }
  if ([options[@"exif"] boolValue]) {
    int imageRotation;
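On the JS side, the `base64` and `exif` fields handled above only appear on the response when they are requested in the options. A small sketch based on the `TakePictureResponse` shape declared in the new typings below; the helper name and the `Orientation` EXIF key are illustrative assumptions about the payload:

```ts
import { RNCamera } from 'react-native-camera';

async function logPhotoMetadata(camera: RNCamera) {
  const photo = await camera.takePictureAsync({ base64: true, exif: true });
  if (photo.base64) {
    console.log('base64 payload length:', photo.base64.length);
  }
  if (photo.exif) {
    console.log('EXIF Orientation tag:', photo.exif.Orientation);
  }
}
```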


@@ -14,6 +14,7 @@
+ (UIImage *)generatePhotoOfSize:(CGSize)size;
+ (UIImage *)cropImage:(UIImage *)image toRect:(CGRect)rect;
+ (UIImage *)mirrorImage:(UIImage *)image;
+ (UIImage *)forceUpOrientation:(UIImage *)image;
+ (NSString *)writeImage:(NSData *)image toPath:(NSString *)path;
+ (UIImage *) scaleImage:(UIImage*)image toWidth:(NSInteger)width;
+ (void)updatePhotoMetadata:(CMSampleBufferRef)imageSampleBuffer withAdditionalData:(NSDictionary *)additionalData inResponse:(NSMutableDictionary *)response;


@@ -81,6 +81,18 @@
  return [UIImage imageWithCGImage:[newImage CGImage] scale:1.0 orientation:(newImage.imageOrientation)];
}

+ (UIImage *)forceUpOrientation:(UIImage *)image
{
  if (image.imageOrientation != UIImageOrientationUp) {
    UIGraphicsBeginImageContextWithOptions(image.size, NO, image.scale);
    [image drawInRect:CGRectMake(0, 0, image.size.width, image.size.height)];
    image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
  }
  return image;
}

+ (void)updatePhotoMetadata:(CMSampleBufferRef)imageSampleBuffer withAdditionalData:(NSDictionary *)additionalData inResponse:(NSMutableDictionary *)response
{
  CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);


@@ -48,6 +48,7 @@
]
},
"main": "src/index.js",
"types": "types",
"nativePackage": true,
"pre-commit": "lint:staged",
"repository": {

types/index.d.ts (new file, 172 additions)

@@ -0,0 +1,172 @@
// Type definitions for react-native-camera 1.0
// Definitions by Felipe Constantino <https://github.com/fconstant>
// If you modify this file, add your GitHub info here as well so you can be contacted easily
/*
* Author notes:
* I've tried to find an easy tool to convert Flow to TypeScript definition files (.d.ts)
* so we wouldn't have to do it manually. Sadly, I haven't found one.
*
* If you are seeing this from the future, please, send us your cutting-edge technology :) (if it exists)
*/
import { Component } from 'react';
import { ViewProperties } from 'react-native';
type AutoFocus = { on: any, off: any };
type FlashMode = { on: any, off: any, torch: any, auto: any };
type CameraType = { front: any, back: any };
type WhiteBalance = { sunny: any, cloudy: any, shadow: any, incandescent: any, fluorescent: any, auto: any };
type BarCodeType = { aztec: any, code128: any, code39: any, code39mod43: any, code93: any, ean13: any, ean8: any, pdf417: any, qr: any, upce: any, interleaved2of5: any, itf14: any, datamatrix: any };
type VideoQuality = { '2160p': any, '1080p': any, '720p': any, '480p': any, '4:3': any };
type FaceDetectionClassifications = { all: any, none: any };
type FaceDetectionLandmarks = { all: any, none: any };
type FaceDetectionMode = { fast: any, accurate: any };
export interface Constants {
AutoFocus: AutoFocus;
FlashMode: FlashMode;
Type: CameraType;
WhiteBalance: WhiteBalance;
VideoQuality: VideoQuality;
BarCodeType: BarCodeType;
FaceDetection: {
Classifications: FaceDetectionClassifications;
Landmarks: FaceDetectionLandmarks;
Mode: FaceDetectionMode;
}
}
export interface RNCameraProps {
autoFocus?: keyof AutoFocus;
type?: keyof CameraType;
flashMode?: keyof FlashMode;
notAuthorizedView?: JSX.Element;
pendingAuthorizationView?: JSX.Element;
onCameraReady?(): void;
onMountError?(): void;
/** Value: float from 0 to 1.0 */
zoom?: number;
/** Value: float from 0 to 1.0 */
focusDepth?: number;
// -- BARCODE PROPS
barCodeTypes?: Array<keyof BarCodeType>;
onBarCodeRead?(data: string, type: keyof BarCodeType): void;
// -- FACE DETECTION PROPS
onFacesDetected?(response: { faces: Face[] }): void;
onFaceDetectionError?(response: { isOperational: boolean }): void;
faceDetectionMode?: keyof FaceDetectionMode;
faceDetectionLandmarks?: keyof FaceDetectionLandmarks;
faceDetectionClassifications?: keyof FaceDetectionClassifications;
// -- ANDROID ONLY PROPS
/** Android only */
ratio?: number;
/** Android only */
permissionDialogTitle?: string;
/** Android only */
permissionDialogMessage?: string;
// -- IOS ONLY PROPS
/** iOS Only */
captureAudio?: boolean;
}
interface Point {
x: number,
y: number
}
interface Face {
faceID?: number,
bounds: {
size: {
width: number;
height: number;
};
origin: Point;
};
smilingProbability?: number;
leftEarPosition?: Point;
rightEarPosition?: Point;
leftEyePosition?: Point;
leftEyeOpenProbability?: number;
rightEyePosition?: Point;
rightEyeOpenProbability?: number;
leftCheekPosition?: Point;
rightCheekPosition?: Point;
leftMouthPosition?: Point;
mouthPosition?: Point;
rightMouthPosition?: Point;
bottomMouthPosition?: Point;
noseBasePosition?: Point;
yawAngle?: number;
rollAngle?: number;
}
interface TakePictureOptions {
quality?: number;
base64?: boolean;
exif?: boolean;
}
interface TakePictureResponse {
width: number;
height: number;
uri: string;
base64?: string;
exif?: { [name: string]: any };
}
interface RecordOptions {
quality?: keyof VideoQuality;
maxDuration?: number;
maxFileSize?: number;
mute?: boolean;
}
interface RecordResponse {
/** Path to the video saved on your app's cache directory. */
uri: string;
}
export class RNCamera extends Component<RNCameraProps & ViewProperties> {
static Constants: Constants;
takePictureAsync(options?: TakePictureOptions): Promise<TakePictureResponse>;
recordAsync(options?: RecordOptions): Promise<RecordResponse>;
stopRecording(): void;
/** Android only */
getSupportedRatiosAsync(): Promise<string[]>;
}
interface DetectionOptions {
mode?: keyof FaceDetectionMode,
detectLandmarks?: keyof FaceDetectionLandmarks,
runClassifications?: keyof FaceDetectionClassifications
}
export class FaceDetector {
private constructor();
static Constants: Constants['FaceDetection'];
static detectFacesAsync(uri: string, options?: DetectionOptions): Promise<Face[]>;
}
// -- DEPRECATED CONTENT BELOW
/**
* @deprecated As of 1.0.0 release, RCTCamera is deprecated. Please use RNCamera for the latest fixes and improvements.
*/
export default class RCTCamera extends Component<any> {
static constants: any;
}
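Finally, a consumer-side sketch exercising the public surface of these typings. The component structure, style, and option values are illustrative assumptions and are not part of the commit; only the imported names and call signatures come from the definitions above:

```tsx
import React, { Component } from 'react';
import { StyleSheet } from 'react-native';
import { RNCamera, FaceDetector } from 'react-native-camera';

class CameraScreen extends Component {
  camera: RNCamera | null = null;

  takePicture = async () => {
    if (!this.camera) return;
    // TakePictureResponse: width, height, uri (+ base64/exif when requested)
    const photo = await this.camera.takePictureAsync({ quality: 0.5, exif: true });
    // Standalone face detection on the captured file
    const faces = await FaceDetector.detectFacesAsync(photo.uri, {
      mode: 'accurate',
      detectLandmarks: 'all',
    });
    console.log(photo.width, photo.height, faces.length);
  };

  render() {
    return (
      <RNCamera
        ref={(ref) => { this.camera = ref; }}
        style={StyleSheet.absoluteFill}
        type="back"
        flashMode="auto"
        autoFocus="on"
      />
    );
  }
}

export default CameraScreen;
```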