Mirror of https://github.com/status-im/react-native-cameraroll.git, synced 2025-01-27 01:52:11 +00:00
* Fix #45 ordering of photos when groupTypes is All
* Fix backwards compatible album name for all photos
commit f5973f8ebe
parent 2b6afeac3d
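What the fix means at the call site: with `groupTypes: 'All'`, the native module now runs a single `PHAsset` fetch instead of walking albums one by one, so results come back in the fetch order, and each node's `group_name` is reported as "All Photos" for backwards compatibility. Below is a minimal TypeScript sketch of such a call; the import path and option values follow the community CameraRoll API and may differ in this fork.

import CameraRoll from '@react-native-community/cameraroll'; // assumed import path; adjust for this fork

// Fetch the newest photos across every album as one globally ordered list.
// With groupTypes: 'All' the native side fetches assets directly, so the
// ordering comes from the fetch options rather than from per-album traversal.
async function loadRecentPhotos(after?: string): Promise<string | undefined> {
  const { edges, page_info } = await CameraRoll.getPhotos({
    first: 20,
    after,              // pass the previous end_cursor to page forward
    groupTypes: 'All',  // the case this commit fixes
    assetType: 'Photos',
  });

  for (const { node } of edges) {
    // group_name falls back to "All Photos" when no album is being enumerated
    console.log(node.group_name, node.image.uri, node.timestamp);
  }

  return page_info.has_next_page ? page_info.end_cursor : undefined;
}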
@@ -215,96 +215,108 @@ RCT_EXPORT_METHOD(getPhotos:(NSDictionary *)params
   if (groupName != nil) {
     collectionFetchOptions.predicate = [NSPredicate predicateWithFormat:[NSString stringWithFormat:@"localizedTitle == '%@'", groupName]];
   }
 
+  BOOL __block stopCollections_;
+  NSString __block *currentCollectionName;
+
   requestPhotoLibraryAccess(reject, ^{
-    PHFetchResult<PHAssetCollection *> *const assetCollectionFetchResult = [PHAssetCollection fetchAssetCollectionsWithType:collectionType subtype:collectionSubtype options:collectionFetchOptions];
-    [assetCollectionFetchResult enumerateObjectsUsingBlock:^(PHAssetCollection * _Nonnull assetCollection, NSUInteger collectionIdx, BOOL * _Nonnull stopCollections) {
-      // Enumerate assets within the collection
-      PHFetchResult<PHAsset *> *const assetsFetchResult = [PHAsset fetchAssetsInAssetCollection:assetCollection options:assetFetchOptions];
-
-      [assetsFetchResult enumerateObjectsUsingBlock:^(PHAsset * _Nonnull asset, NSUInteger assetIdx, BOOL * _Nonnull stopAssets) {
-        NSString *const uri = [NSString stringWithFormat:@"ph://%@", [asset localIdentifier]];
-        if (afterCursor && !foundAfter) {
-          if ([afterCursor isEqualToString:uri]) {
-            foundAfter = YES;
-          }
-          return; // skip until we get to the first one
-        }
-
-        // Get underlying resources of an asset - this includes files as well as details about edited PHAssets
-        if ([mimeTypes count] > 0) {
-          NSArray<PHAssetResource *> *const assetResources = [PHAssetResource assetResourcesForAsset:asset];
-          if (![assetResources firstObject]) {
-            return;
-          }
-
-          PHAssetResource *const _Nonnull resource = [assetResources firstObject];
-          CFStringRef const uti = (__bridge CFStringRef _Nonnull)(resource.uniformTypeIdentifier);
-          NSString *const mimeType = (NSString *)CFBridgingRelease(UTTypeCopyPreferredTagWithClass(uti, kUTTagClassMIMEType));
-
-          BOOL __block mimeTypeFound = NO;
-          [mimeTypes enumerateObjectsUsingBlock:^(NSString * _Nonnull mimeTypeFilter, NSUInteger idx, BOOL * _Nonnull stop) {
-            if ([mimeType isEqualToString:mimeTypeFilter]) {
-              mimeTypeFound = YES;
-              *stop = YES;
-            }
-          }];
-
-          if (!mimeTypeFound) {
-            return;
-          }
-        }
-
-        // If we've accumulated enough results to resolve a single promise
-        if (first == assets.count) {
-          *stopAssets = YES;
-          *stopCollections = YES;
-          hasNextPage = YES;
-          RCTAssert(resolvedPromise == NO, @"Resolved the promise before we finished processing the results.");
-          RCTResolvePromise(resolve, assets, hasNextPage);
-          resolvedPromise = YES;
-          return;
-        }
-
-        NSString *const assetMediaTypeLabel = (asset.mediaType == PHAssetMediaTypeVideo
-                                              ? @"video"
-                                              : (asset.mediaType == PHAssetMediaTypeImage
-                                                 ? @"image"
-                                                 : (asset.mediaType == PHAssetMediaTypeAudio
-                                                    ? @"audio"
-                                                    : @"unknown")));
-        CLLocation *const loc = asset.location;
-
-        // A note on isStored: in the previous code that used ALAssets, isStored
-        // was always set to YES, probably because iCloud-synced images were never returned (?).
-        // To get the "isStored" information and filename, we would need to actually request the
-        // image data from the image manager. Those operations could get really expensive and
-        // would definitely utilize the disk too much.
-        // Thus, this field is actually not reliable.
-        // Note that Android also does not return the `isStored` field at all.
-        [assets addObject:@{
-          @"node": @{
-            @"type": assetMediaTypeLabel, // TODO: switch to mimeType?
-            @"group_name": [assetCollection localizedTitle],
-            @"image": @{
-              @"uri": uri,
-              @"height": @([asset pixelHeight]),
-              @"width": @([asset pixelWidth]),
-              @"isStored": @YES, // this field doesn't seem to exist on android
-              @"playableDuration": @([asset duration]) // fractional seconds
-            },
-            @"timestamp": @(asset.creationDate.timeIntervalSince1970),
-            @"location": (loc ? @{
-              @"latitude": @(loc.coordinate.latitude),
-              @"longitude": @(loc.coordinate.longitude),
-              @"altitude": @(loc.altitude),
-              @"heading": @(loc.course),
-              @"speed": @(loc.speed), // speed in m/s
-            } : @{})
-          }
-        }];
-      }];
-    }];
+    void (^collectAsset)(PHAsset*, NSUInteger, BOOL*) = ^(PHAsset * _Nonnull asset, NSUInteger assetIdx, BOOL * _Nonnull stopAssets) {
+      NSString *const uri = [NSString stringWithFormat:@"ph://%@", [asset localIdentifier]];
+      if (afterCursor && !foundAfter) {
+        if ([afterCursor isEqualToString:uri]) {
+          foundAfter = YES;
+        }
+        return; // skip until we get to the first one
+      }
+
+      // Get underlying resources of an asset - this includes files as well as details about edited PHAssets
+      if ([mimeTypes count] > 0) {
+        NSArray<PHAssetResource *> *const assetResources = [PHAssetResource assetResourcesForAsset:asset];
+        if (![assetResources firstObject]) {
+          return;
+        }
+
+        PHAssetResource *const _Nonnull resource = [assetResources firstObject];
+        CFStringRef const uti = (__bridge CFStringRef _Nonnull)(resource.uniformTypeIdentifier);
+        NSString *const mimeType = (NSString *)CFBridgingRelease(UTTypeCopyPreferredTagWithClass(uti, kUTTagClassMIMEType));
+
+        BOOL __block mimeTypeFound = NO;
+        [mimeTypes enumerateObjectsUsingBlock:^(NSString * _Nonnull mimeTypeFilter, NSUInteger idx, BOOL * _Nonnull stop) {
+          if ([mimeType isEqualToString:mimeTypeFilter]) {
+            mimeTypeFound = YES;
+            *stop = YES;
+          }
+        }];
+
+        if (!mimeTypeFound) {
+          return;
+        }
+      }
+
+      // If we've accumulated enough results to resolve a single promise
+      if (first == assets.count) {
+        *stopAssets = YES;
+        stopCollections_ = YES;
+        hasNextPage = YES;
+        RCTAssert(resolvedPromise == NO, @"Resolved the promise before we finished processing the results.");
+        RCTResolvePromise(resolve, assets, hasNextPage);
+        resolvedPromise = YES;
+        return;
+      }
+
+      NSString *const assetMediaTypeLabel = (asset.mediaType == PHAssetMediaTypeVideo
+                                            ? @"video"
+                                            : (asset.mediaType == PHAssetMediaTypeImage
+                                               ? @"image"
+                                               : (asset.mediaType == PHAssetMediaTypeAudio
+                                                  ? @"audio"
+                                                  : @"unknown")));
+      CLLocation *const loc = asset.location;
+
+      // A note on isStored: in the previous code that used ALAssets, isStored
+      // was always set to YES, probably because iCloud-synced images were never returned (?).
+      // To get the "isStored" information and filename, we would need to actually request the
+      // image data from the image manager. Those operations could get really expensive and
+      // would definitely utilize the disk too much.
+      // Thus, this field is actually not reliable.
+      // Note that Android also does not return the `isStored` field at all.
+      [assets addObject:@{
+        @"node": @{
+          @"type": assetMediaTypeLabel, // TODO: switch to mimeType?
+          @"group_name": currentCollectionName,
+          @"image": @{
+            @"uri": uri,
+            @"height": @([asset pixelHeight]),
+            @"width": @([asset pixelWidth]),
+            @"isStored": @YES, // this field doesn't seem to exist on android
+            @"playableDuration": @([asset duration]) // fractional seconds
+          },
+          @"timestamp": @(asset.creationDate.timeIntervalSince1970),
+          @"location": (loc ? @{
+            @"latitude": @(loc.coordinate.latitude),
+            @"longitude": @(loc.coordinate.longitude),
+            @"altitude": @(loc.altitude),
+            @"heading": @(loc.course),
+            @"speed": @(loc.speed), // speed in m/s
+          } : @{})
+        }
+      }];
+    };
+
+    if ([groupTypes isEqualToString:@"all"]) {
+      PHFetchResult <PHAsset *> *const assetFetchResult = [PHAsset fetchAssetsWithOptions: assetFetchOptions];
+      currentCollectionName = @"All Photos";
+      [assetFetchResult enumerateObjectsUsingBlock:collectAsset];
+    } else {
+      PHFetchResult<PHAssetCollection *> *const assetCollectionFetchResult = [PHAssetCollection fetchAssetCollectionsWithType:collectionType subtype:collectionSubtype options:collectionFetchOptions];
+      [assetCollectionFetchResult enumerateObjectsUsingBlock:^(PHAssetCollection * _Nonnull assetCollection, NSUInteger collectionIdx, BOOL * _Nonnull stopCollections) {
+        // Enumerate assets within the collection
+        PHFetchResult<PHAsset *> *const assetsFetchResult = [PHAsset fetchAssetsInAssetCollection:assetCollection options:assetFetchOptions];
+        currentCollectionName = [assetCollection localizedTitle];
+        [assetsFetchResult enumerateObjectsUsingBlock:collectAsset];
+        *stopCollections = stopCollections_;
+      }];
+    }
 
     // If we get this far and haven't resolved the promise yet, we reached the end of the list of photos
     if (!resolvedPromise) {