Un-revert D13513777: Replace ALAssets* with PHPhoto* in RCTCameraRoll

Summary: Replaced all deprecated ALAssets* references with roughly equivalent PHPhoto* references in the RCTCameraRoll library. There are still some minor inconsistencies between iOS/Android and the documentation that existed prior to this diff; these need to be resolved after this lands.

Reviewed By: fkgozali

Differential Revision: D13593314

fbshipit-source-id: 6d3dc43383e3ad6e3dbe73d4ceceac1ba9261d9d
This commit is contained in:
Joshua Gross 2019-01-07 16:11:52 -08:00 committed by Facebook Github Bot
parent ac32e98217
commit e172dc7b94
4 changed files with 263 additions and 257 deletions

View File

@@ -8,17 +8,8 @@
#import <React/RCTBridge.h> #import <React/RCTBridge.h>
#import <React/RCTURLRequestHandler.h> #import <React/RCTURLRequestHandler.h>
@class ALAssetsLibrary; @class PHPhotoLibrary;
@interface RCTAssetsLibraryRequestHandler : NSObject <RCTURLRequestHandler> @interface RCTAssetsLibraryRequestHandler : NSObject <RCTURLRequestHandler>
@end @end
@interface RCTBridge (RCTAssetsLibraryImageLoader)
/**
* The shared asset library instance.
*/
@property (nonatomic, readonly) ALAssetsLibrary *assetsLibrary;
@end

View File

@@ -11,41 +11,26 @@
#import <dlfcn.h> #import <dlfcn.h>
#import <objc/runtime.h> #import <objc/runtime.h>
#import <AssetsLibrary/AssetsLibrary.h> #import <Photos/Photos.h>
#import <MobileCoreServices/MobileCoreServices.h> #import <MobileCoreServices/MobileCoreServices.h>
#import <React/RCTBridge.h> #import <React/RCTBridge.h>
#import <React/RCTUtils.h> #import <React/RCTUtils.h>
@implementation RCTAssetsLibraryRequestHandler @implementation RCTAssetsLibraryRequestHandler
{
ALAssetsLibrary *_assetsLibrary;
}
RCT_EXPORT_MODULE() RCT_EXPORT_MODULE()
@synthesize bridge = _bridge;
static Class _ALAssetsLibrary = nil;
static void ensureAssetsLibLoaded(void)
{
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
void * handle = dlopen("/System/Library/Frameworks/AssetsLibrary.framework/AssetsLibrary", RTLD_LAZY);
#pragma unused(handle)
_ALAssetsLibrary = objc_getClass("ALAssetsLibrary");
});
}
- (ALAssetsLibrary *)assetsLibrary
{
ensureAssetsLibLoaded();
return _assetsLibrary ?: (_assetsLibrary = [_ALAssetsLibrary new]);
}
#pragma mark - RCTURLRequestHandler #pragma mark - RCTURLRequestHandler
- (BOOL)canHandleRequest:(NSURLRequest *)request - (BOOL)canHandleRequest:(NSURLRequest *)request
{ {
return [request.URL.scheme caseInsensitiveCompare:@"assets-library"] == NSOrderedSame; if (![PHAsset class]) {
return NO;
}
return [request.URL.scheme caseInsensitiveCompare:@"assets-library"] == NSOrderedSame
|| [request.URL.scheme caseInsensitiveCompare:@"ph"] == NSOrderedSame;
} }
- (id)sendRequest:(NSURLRequest *)request - (id)sendRequest:(NSURLRequest *)request
@@ -56,55 +41,72 @@ static void ensureAssetsLibLoaded(void)
atomic_store(&cancelled, YES); atomic_store(&cancelled, YES);
}; };
[[self assetsLibrary] assetForURL:request.URL resultBlock:^(ALAsset *asset) { if (!request.URL) {
if (atomic_load(&cancelled)) { NSString *const msg = [NSString stringWithFormat:@"Cannot send request without URL"];
return; [delegate URLRequest:cancellationBlock didCompleteWithError:RCTErrorWithMessage(msg)];
} return cancellationBlock;
}
if (asset) { PHFetchResult<PHAsset *> *fetchResult;
ALAssetRepresentation *representation = [asset defaultRepresentation]; if ([request.URL.scheme caseInsensitiveCompare:@"ph"] == NSOrderedSame) {
NSInteger length = (NSInteger)representation.size; // Fetch assets using PHAsset localIdentifier (recommended)
CFStringRef MIMEType = UTTypeCopyPreferredTagWithClass((__bridge CFStringRef _Nonnull)(representation.UTI), kUTTagClassMIMEType); NSString *const localIdentifier = [request.URL.absoluteString substringFromIndex:@"ph://".length];
fetchResult = [PHAsset fetchAssetsWithLocalIdentifiers:@[localIdentifier] options:nil];
} else if ([request.URL.scheme caseInsensitiveCompare:@"assets-library"] == NSOrderedSame) {
// This is the older, deprecated way of fetching assets from assets-library
// using the "assets-library://" protocol
fetchResult = [PHAsset fetchAssetsWithALAssetURLs:@[request.URL] options:nil];
} else {
NSString *const msg = [NSString stringWithFormat:@"Cannot send request with unknown protocol: %@", request.URL];
[delegate URLRequest:cancellationBlock didCompleteWithError:RCTErrorWithMessage(msg)];
return cancellationBlock;
}
NSURLResponse *response = if (![fetchResult firstObject]) {
[[NSURLResponse alloc] initWithURL:request.URL NSString *errorMessage = [NSString stringWithFormat:@"Failed to load asset"
MIMEType:(__bridge NSString *)(MIMEType) " at URL %@ with no error message.", request.URL];
expectedContentLength:length NSError *error = RCTErrorWithMessage(errorMessage);
textEncodingName:nil]; [delegate URLRequest:cancellationBlock didCompleteWithError:error];
return cancellationBlock;
}
[delegate URLRequest:cancellationBlock didReceiveResponse:response]; if (atomic_load(&cancelled)) {
return cancellationBlock;
}
NSError *error = nil; PHAsset *const _Nonnull asset = [fetchResult firstObject];
uint8_t *buffer = (uint8_t *)malloc((size_t)length);
if ([representation getBytes:buffer
fromOffset:0
length:length
error:&error]) {
NSData *data = [[NSData alloc] initWithBytesNoCopy:buffer // By default, allow downloading images from iCloud
length:length PHImageRequestOptions *const requestOptions = [PHImageRequestOptions new];
freeWhenDone:YES]; requestOptions.networkAccessAllowed = YES;
[delegate URLRequest:cancellationBlock didReceiveData:data]; [[PHImageManager defaultManager] requestImageDataForAsset:asset
[delegate URLRequest:cancellationBlock didCompleteWithError:nil]; options:requestOptions
resultHandler:^(NSData * _Nullable imageData,
} else { NSString * _Nullable dataUTI,
free(buffer); UIImageOrientation orientation,
[delegate URLRequest:cancellationBlock didCompleteWithError:error]; NSDictionary * _Nullable info) {
} NSError *const error = [info objectForKey:PHImageErrorKey];
if (error) {
} else {
NSString *errorMessage = [NSString stringWithFormat:@"Failed to load asset"
" at URL %@ with no error message.", request.URL];
NSError *error = RCTErrorWithMessage(errorMessage);
[delegate URLRequest:cancellationBlock didCompleteWithError:error]; [delegate URLRequest:cancellationBlock didCompleteWithError:error];
}
} failureBlock:^(NSError *loadError) {
if (atomic_load(&cancelled)) {
return; return;
} }
[delegate URLRequest:cancellationBlock didCompleteWithError:loadError];
NSInteger const length = [imageData length];
CFStringRef const dataUTIStringRef = (__bridge CFStringRef _Nonnull)(dataUTI);
CFStringRef const mimeType = UTTypeCopyPreferredTagWithClass(dataUTIStringRef, kUTTagClassMIMEType);
NSURLResponse *const response = [[NSURLResponse alloc] initWithURL:request.URL
MIMEType:(__bridge NSString *)(mimeType)
expectedContentLength:length
textEncodingName:nil];
CFRelease(mimeType);
[delegate URLRequest:cancellationBlock didReceiveResponse:response];
[delegate URLRequest:cancellationBlock didReceiveData:imageData];
[delegate URLRequest:cancellationBlock didCompleteWithError:nil];
}]; }];
return cancellationBlock; return cancellationBlock;
@@ -116,12 +118,3 @@ static void ensureAssetsLibLoaded(void)
} }
@end @end
@implementation RCTBridge (RCTAssetsLibraryImageLoader)
- (ALAssetsLibrary *)assetsLibrary
{
return [[self moduleForClass:[RCTAssetsLibraryRequestHandler class]] assetsLibrary];
}
@end

View File

@@ -5,18 +5,18 @@
* LICENSE file in the root directory of this source tree. * LICENSE file in the root directory of this source tree.
*/ */
#import <AssetsLibrary/AssetsLibrary.h> #import <Photos/Photos.h>
#import <React/RCTBridgeModule.h> #import <React/RCTBridgeModule.h>
#import <React/RCTConvert.h> #import <React/RCTConvert.h>
@interface RCTConvert (ALAssetGroup) @interface RCTConvert (PHFetchOptions)
+ (ALAssetsGroupType)ALAssetsGroupType:(id)json; + (PHFetchOptions *)PHFetchOptionsFromMediaType:(NSString *)mediaType;
+ (ALAssetsFilter *)ALAssetsFilter:(id)json;
@end @end
@interface RCTCameraRollManager : NSObject <RCTBridgeModule> @interface RCTCameraRollManager : NSObject <RCTBridgeModule>
@end @end

View File

@@ -13,6 +13,7 @@
#import <Photos/Photos.h> #import <Photos/Photos.h>
#import <dlfcn.h> #import <dlfcn.h>
#import <objc/runtime.h> #import <objc/runtime.h>
#import <MobileCoreServices/UTType.h>
#import <React/RCTBridge.h> #import <React/RCTBridge.h>
#import <React/RCTConvert.h> #import <React/RCTConvert.h>
@@ -22,85 +23,46 @@
#import "RCTAssetsLibraryRequestHandler.h" #import "RCTAssetsLibraryRequestHandler.h"
@implementation RCTConvert (ALAssetGroup) @implementation RCTConvert (PHAssetCollectionSubtype)
RCT_ENUM_CONVERTER(ALAssetsGroupType, (@{ RCT_ENUM_CONVERTER(PHAssetCollectionSubtype, (@{
@"album": @(PHAssetCollectionSubtypeAny),
@"all": @(PHAssetCollectionSubtypeAny),
@"event": @(PHAssetCollectionSubtypeAlbumSyncedEvent),
@"faces": @(PHAssetCollectionSubtypeAlbumSyncedFaces),
@"library": @(PHAssetCollectionSubtypeSmartAlbumUserLibrary),
@"photo-stream": @(PHAssetCollectionSubtypeAlbumMyPhotoStream), // incorrect, but legacy
@"photostream": @(PHAssetCollectionSubtypeAlbumMyPhotoStream),
@"saved-photos": @(PHAssetCollectionSubtypeAny), // incorrect, but legacy
@"savedphotos": @(PHAssetCollectionSubtypeAny), // This was ALAssetsGroupSavedPhotos, seems to have no direct correspondence in PHAssetCollectionSubtype
}), PHAssetCollectionSubtypeAny, integerValue)
// New values
@"album": @(ALAssetsGroupAlbum),
@"all": @(ALAssetsGroupAll),
@"event": @(ALAssetsGroupEvent),
@"faces": @(ALAssetsGroupFaces),
@"library": @(ALAssetsGroupLibrary),
@"photo-stream": @(ALAssetsGroupPhotoStream),
@"saved-photos": @(ALAssetsGroupSavedPhotos),
// Legacy values @end
@"Album": @(ALAssetsGroupAlbum),
@"All": @(ALAssetsGroupAll),
@"Event": @(ALAssetsGroupEvent),
@"Faces": @(ALAssetsGroupFaces),
@"Library": @(ALAssetsGroupLibrary),
@"PhotoStream": @(ALAssetsGroupPhotoStream),
@"SavedPhotos": @(ALAssetsGroupSavedPhotos),
}), ALAssetsGroupSavedPhotos, integerValue) @implementation RCTConvert (PHFetchOptions)
static Class _ALAssetsFilter = nil; + (PHFetchOptions *)PHFetchOptionsFromMediaType:(NSString *)mediaType
static NSString *_ALAssetsGroupPropertyName = nil;
static NSString *_ALAssetPropertyAssetURL = nil;
static NSString *_ALAssetPropertyLocation = nil;
static NSString *_ALAssetPropertyDate = nil;
static NSString *_ALAssetPropertyType = nil;
static NSString *_ALAssetPropertyDuration = nil;
static NSString *_ALAssetTypeVideo = nil;
static NSString *lookupNSString(void * handle, const char * name)
{ {
void ** sym = dlsym(handle, name); // This is not exhaustive in terms of supported media type predicates; more can be added in the future
return (__bridge NSString *)(sym ? *sym : nil); NSString *const lowercase = [mediaType lowercaseString];
}
static void ensureAssetsLibLoaded(void)
{
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
void * handle = dlopen("/System/Library/Frameworks/AssetsLibrary.framework/AssetsLibrary", RTLD_LAZY);
RCTAssert(handle != NULL, @"Unable to load AssetsLibrary.framework.");
_ALAssetsFilter = objc_getClass("ALAssetsFilter");
_ALAssetsGroupPropertyName = lookupNSString(handle, "ALAssetsGroupPropertyName");
_ALAssetPropertyAssetURL = lookupNSString(handle, "ALAssetPropertyAssetURL");
_ALAssetPropertyLocation = lookupNSString(handle, "ALAssetPropertyLocation");
_ALAssetPropertyDate = lookupNSString(handle, "ALAssetPropertyDate");
_ALAssetPropertyType = lookupNSString(handle, "ALAssetPropertyType");
_ALAssetPropertyDuration = lookupNSString(handle, "ALAssetPropertyDuration");
_ALAssetTypeVideo = lookupNSString(handle, "ALAssetTypeVideo");
});
}
+ (ALAssetsFilter *)ALAssetsFilter:(id)json if ([lowercase isEqualToString:@"photos"]) {
{ PHFetchOptions *const options = [PHFetchOptions new];
static NSDictionary<NSString *, ALAssetsFilter *> *options; options.predicate = [NSPredicate predicateWithFormat:@"mediaType = %d", PHAssetMediaTypeImage];
static dispatch_once_t onceToken; return options;
dispatch_once(&onceToken, ^{ } else if ([lowercase isEqualToString:@"videos"]) {
ensureAssetsLibLoaded(); PHFetchOptions *const options = [PHFetchOptions new];
options = @{ options.predicate = [NSPredicate predicateWithFormat:@"mediaType = %d", PHAssetMediaTypeVideo];
// New values return options;
@"photos": [_ALAssetsFilter allPhotos], } else {
@"videos": [_ALAssetsFilter allVideos], if (![lowercase isEqualToString:@"all"]) {
@"all": [_ALAssetsFilter allAssets], RCTLogError(@"Invalid filter option: '%@'. Expected one of 'photos',"
"'videos' or 'all'.", mediaType);
// Legacy values }
@"Photos": [_ALAssetsFilter allPhotos], // This case includes the "all" mediatype
@"Videos": [_ALAssetsFilter allVideos], return nil;
@"All": [_ALAssetsFilter allAssets],
};
});
ALAssetsFilter *filter = options[json ?: @"photos"];
if (!filter) {
RCTLogError(@"Invalid filter option: '%@'. Expected one of 'photos',"
"'videos' or 'all'.", json);
} }
return filter ?: [_ALAssetsFilter allPhotos];
} }
@end @end
@@ -111,43 +73,61 @@ RCT_EXPORT_MODULE()
@synthesize bridge = _bridge; @synthesize bridge = _bridge;
static NSString *const kErrorUnableToLoad = @"E_UNABLE_TO_LOAD";
static NSString *const kErrorUnableToSave = @"E_UNABLE_TO_SAVE"; static NSString *const kErrorUnableToSave = @"E_UNABLE_TO_SAVE";
static NSString *const kErrorUnableToLoad = @"E_UNABLE_TO_LOAD";
RCT_EXPORT_METHOD(saveToCameraRoll:(NSURLRequest *)request RCT_EXPORT_METHOD(saveToCameraRoll:(NSURLRequest *)request
type:(NSString *)type type:(NSString *)type
resolve:(RCTPromiseResolveBlock)resolve resolve:(RCTPromiseResolveBlock)resolve
reject:(RCTPromiseRejectBlock)reject) reject:(RCTPromiseRejectBlock)reject)
{ {
__block PHObjectPlaceholder *placeholder;
// We load images and videos differently.
// Images have many custom loaders which can load images from ALAssetsLibrary URLs, PHPhotoLibrary
// URLs, `data:` URIs, etc. Video URLs are passed directly through for now; it may be nice to support
// more ways of loading videos in the future.
__block NSURL *inputURI = nil;
__block UIImage *inputImage = nil;
void (^saveBlock)(void) = ^void() {
// performChanges and the completionHandler are called on
// arbitrary threads, not the main thread - this is safe
// for now since all JS is queued and executed on a single thread.
// We should reevaluate this if that assumption changes.
[[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
PHAssetChangeRequest *changeRequest;
// Defaults to "photo". `type` is an optional param.
if ([type isEqualToString:@"video"]) {
changeRequest = [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:inputURI];
} else {
changeRequest = [PHAssetChangeRequest creationRequestForAssetFromImage:inputImage];
}
placeholder = [changeRequest placeholderForCreatedAsset];
} completionHandler:^(BOOL success, NSError * _Nullable error) {
if (success) {
NSString *uri = [NSString stringWithFormat:@"ph://%@", [placeholder localIdentifier]];
resolve(uri);
} else {
reject(kErrorUnableToSave, nil, error);
}
}];
};
if ([type isEqualToString:@"video"]) { if ([type isEqualToString:@"video"]) {
// It's unclear if writeVideoAtPathToSavedPhotosAlbum is thread-safe inputURI = request.URL;
dispatch_async(dispatch_get_main_queue(), ^{ saveBlock();
[self->_bridge.assetsLibrary writeVideoAtPathToSavedPhotosAlbum:request.URL completionBlock:^(NSURL *assetURL, NSError *saveError) {
if (saveError) {
reject(kErrorUnableToSave, nil, saveError);
} else {
resolve(assetURL.absoluteString);
}
}];
});
} else { } else {
[_bridge.imageLoader loadImageWithURLRequest:request [_bridge.imageLoader loadImageWithURLRequest:request callback:^(NSError *error, UIImage *image) {
callback:^(NSError *loadError, UIImage *loadedImage) { if (error) {
if (loadError) { reject(kErrorUnableToLoad, nil, error);
reject(kErrorUnableToLoad, nil, loadError);
return; return;
} }
// It's unclear if writeImageToSavedPhotosAlbum is thread-safe
dispatch_async(dispatch_get_main_queue(), ^{ inputImage = image;
[self->_bridge.assetsLibrary writeImageToSavedPhotosAlbum:loadedImage.CGImage metadata:nil completionBlock:^(NSURL *assetURL, NSError *saveError) { saveBlock();
if (saveError) {
RCTLogWarn(@"Error saving cropped image: %@", saveError);
reject(kErrorUnableToSave, nil, saveError);
} else {
resolve(assetURL.absoluteString);
}
}];
});
}]; }];
} }
} }
@@ -181,89 +161,131 @@ RCT_EXPORT_METHOD(getPhotos:(NSDictionary *)params
{ {
checkPhotoLibraryConfig(); checkPhotoLibraryConfig();
ensureAssetsLibLoaded(); NSUInteger const first = [RCTConvert NSInteger:params[@"first"]];
NSUInteger first = [RCTConvert NSInteger:params[@"first"]]; NSString *const afterCursor = [RCTConvert NSString:params[@"after"]];
NSString *afterCursor = [RCTConvert NSString:params[@"after"]]; NSString *const groupName = [RCTConvert NSString:params[@"groupName"]];
NSString *groupName = [RCTConvert NSString:params[@"groupName"]]; NSString *const groupTypes = [[RCTConvert NSString:params[@"groupTypes"]] lowercaseString];
ALAssetsFilter *assetType = [RCTConvert ALAssetsFilter:params[@"assetType"]]; NSString *const mediaType = [RCTConvert NSString:params[@"assetType"]];
ALAssetsGroupType groupTypes = [RCTConvert ALAssetsGroupType:params[@"groupTypes"]]; NSArray<NSString *> *const mimeTypes = [RCTConvert NSStringArray:params[@"mimeTypes"]];
// If groupTypes is "all", we want to fetch the SmartAlbum "all photos". Otherwise, all
// other groupTypes values require the "album" collection type.
PHAssetCollectionType const collectionType = ([groupTypes isEqualToString:@"all"]
? PHAssetCollectionTypeSmartAlbum
: PHAssetCollectionTypeAlbum);
PHAssetCollectionSubtype const collectionSubtype = [RCTConvert PHAssetCollectionSubtype:groupTypes];
// Predicate for fetching assets within a collection
PHFetchOptions *const assetFetchOptions = [RCTConvert PHFetchOptionsFromMediaType:mediaType];
assetFetchOptions.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:NO]];
BOOL __block foundAfter = NO; BOOL __block foundAfter = NO;
BOOL __block hasNextPage = NO; BOOL __block hasNextPage = NO;
BOOL __block resolvedPromise = NO; BOOL __block resolvedPromise = NO;
NSMutableArray<NSDictionary<NSString *, id> *> *assets = [NSMutableArray new]; NSMutableArray<NSDictionary<NSString *, id> *> *assets = [NSMutableArray new];
[_bridge.assetsLibrary enumerateGroupsWithTypes:groupTypes usingBlock:^(ALAssetsGroup *group, BOOL *stopGroups) { // Filter collection name ("group")
if (group && (groupName == nil || [groupName isEqualToString:[group valueForProperty:_ALAssetsGroupPropertyName]])) { PHFetchOptions *const collectionFetchOptions = [PHFetchOptions new];
collectionFetchOptions.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"endDate" ascending:NO]];
if (groupName != nil) {
collectionFetchOptions.predicate = [NSPredicate predicateWithFormat:[NSString stringWithFormat:@"localizedTitle == '%@'", groupName]];
}
[group setAssetsFilter:assetType]; PHFetchResult<PHAssetCollection *> *const assetCollectionFetchResult = [PHAssetCollection fetchAssetCollectionsWithType:collectionType subtype:collectionSubtype options:collectionFetchOptions];
[group enumerateAssetsWithOptions:NSEnumerationReverse usingBlock:^(ALAsset *result, NSUInteger index, BOOL *stopAssets) { [assetCollectionFetchResult enumerateObjectsUsingBlock:^(PHAssetCollection * _Nonnull assetCollection, NSUInteger collectionIdx, BOOL * _Nonnull stopCollections) {
if (result) { // Enumerate assets within the collection
NSString *uri = ((NSURL *)[result valueForProperty:_ALAssetPropertyAssetURL]).absoluteString; PHFetchResult<PHAsset *> *const assetsFetchResult = [PHAsset fetchAssetsInAssetCollection:assetCollection options:assetFetchOptions];
if (afterCursor && !foundAfter) {
if ([afterCursor isEqualToString:uri]) {
foundAfter = YES;
}
return; // Skip until we get to the first one
}
if (first == assets.count) {
*stopAssets = YES;
*stopGroups = YES;
hasNextPage = YES;
RCTAssert(resolvedPromise == NO, @"Resolved the promise before we finished processing the results.");
RCTResolvePromise(resolve, assets, hasNextPage);
resolvedPromise = YES;
return;
}
CGSize dimensions = [result defaultRepresentation].dimensions;
CLLocation *loc = [result valueForProperty:_ALAssetPropertyLocation];
NSDate *date = [result valueForProperty:_ALAssetPropertyDate];
NSString *filename = [result defaultRepresentation].filename;
int64_t duration = 0;
if ([[result valueForProperty:_ALAssetPropertyType] isEqualToString:_ALAssetTypeVideo]) {
duration = [[result valueForProperty:_ALAssetPropertyDuration] intValue];
}
[assets addObject:@{ [assetsFetchResult enumerateObjectsUsingBlock:^(PHAsset * _Nonnull asset, NSUInteger assetIdx, BOOL * _Nonnull stopAssets) {
@"node": @{ NSString *const uri = [NSString stringWithFormat:@"ph://%@", [asset localIdentifier]];
@"type": [result valueForProperty:_ALAssetPropertyType], if (afterCursor && !foundAfter) {
@"group_name": [group valueForProperty:_ALAssetsGroupPropertyName], if ([afterCursor isEqualToString:uri]) {
@"image": @{ foundAfter = YES;
@"uri": uri,
@"filename" : filename ?: [NSNull null],
@"height": @(dimensions.height),
@"width": @(dimensions.width),
@"isStored": @YES,
@"playableDuration": @(duration),
},
@"timestamp": @(date.timeIntervalSince1970),
@"location": loc ? @{
@"latitude": @(loc.coordinate.latitude),
@"longitude": @(loc.coordinate.longitude),
@"altitude": @(loc.altitude),
@"heading": @(loc.course),
@"speed": @(loc.speed),
} : @{},
}
}];
} }
}]; return; // skip until we get to the first one
} }
if (!group) { // Get underlying resources of an asset - this includes files as well as details about edited PHAssets
// Sometimes the enumeration continues even if we set stop above, so we guard against resolving the promise if ([mimeTypes count] > 0) {
// multiple times here. NSArray<PHAssetResource *> *const assetResources = [PHAssetResource assetResourcesForAsset:asset];
if (!resolvedPromise) { if (![assetResources firstObject]) {
return;
}
PHAssetResource *const _Nonnull resource = [assetResources firstObject];
CFStringRef const uti = (__bridge CFStringRef _Nonnull)(resource.uniformTypeIdentifier);
NSString *const mimeType = (NSString *)CFBridgingRelease(UTTypeCopyPreferredTagWithClass(uti, kUTTagClassMIMEType));
BOOL __block mimeTypeFound = NO;
[mimeTypes enumerateObjectsUsingBlock:^(NSString * _Nonnull mimeTypeFilter, NSUInteger idx, BOOL * _Nonnull stop) {
if ([mimeType isEqualToString:mimeTypeFilter]) {
mimeTypeFound = YES;
*stop = YES;
}
}];
if (!mimeTypeFound) {
return;
}
}
// If we've accumulated enough results to resolve a single promise
if (first == assets.count) {
*stopAssets = YES;
*stopCollections = YES;
hasNextPage = YES;
RCTAssert(resolvedPromise == NO, @"Resolved the promise before we finished processing the results.");
RCTResolvePromise(resolve, assets, hasNextPage); RCTResolvePromise(resolve, assets, hasNextPage);
resolvedPromise = YES; resolvedPromise = YES;
return;
} }
}
} failureBlock:^(NSError *error) { NSString *const assetMediaTypeLabel = (asset.mediaType == PHAssetMediaTypeVideo
if (error.code != ALAssetsLibraryAccessUserDeniedError) { ? @"video"
RCTLogError(@"Failure while iterating through asset groups %@", error); : (asset.mediaType == PHAssetMediaTypeImage
} ? @"image"
reject(kErrorUnableToLoad, nil, error); : (asset.mediaType == PHAssetMediaTypeAudio
? @"audio"
: @"unknown")));
CLLocation *const loc = asset.location;
// A note on isStored: in the previous code that used ALAssets, isStored
// was always set to YES, probably because iCloud-synced images were never returned (?).
// To get the "isStored" information and filename, we would need to actually request the
// image data from the image manager. Those operations could get really expensive and
// would definitely utilize the disk too much.
// Thus, this field is actually not reliable.
// Note that Android also does not return the `isStored` field at all.
[assets addObject:@{
@"node": @{
@"type": assetMediaTypeLabel, // TODO: switch to mimeType?
@"group_name": [assetCollection localizedTitle],
@"image": @{
@"uri": uri,
@"height": @([asset pixelHeight]),
@"width": @([asset pixelWidth]),
@"isStored": @YES, // this field doesn't seem to exist on android
@"playableDuration": @([asset duration]) // fractional seconds
},
@"timestamp": @(asset.creationDate.timeIntervalSince1970),
@"location": (loc ? @{
@"latitude": @(loc.coordinate.latitude),
@"longitude": @(loc.coordinate.longitude),
@"altitude": @(loc.altitude),
@"heading": @(loc.course),
@"speed": @(loc.speed), // speed in m/s
} : @{})
}
}];
}];
}]; }];
// If we get this far and haven't resolved the promise yet, we reached the end of the list of photos
if (!resolvedPromise) {
hasNextPage = NO;
RCTResolvePromise(resolve, assets, hasNextPage);
resolvedPromise = YES;
}
} }
RCT_EXPORT_METHOD(deletePhotos:(NSArray<NSString *>*)assets RCT_EXPORT_METHOD(deletePhotos:(NSArray<NSString *>*)assets