Fixed rotation and scaling issues when loading ALAssets using RCTImageLoader

Nick Lockwood 2015-07-21 05:40:06 -07:00
parent 9c73e2ff7a
commit 85cb35c514
4 changed files with 90 additions and 71 deletions

View File

@@ -23,7 +23,7 @@
13DB03481B5D2ED500C27245 /* RCTJSONTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 13DB03471B5D2ED500C27245 /* RCTJSONTests.m */; };
141FC1211B222EBB004D5FFB /* IntegrationTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 141FC1201B222EBB004D5FFB /* IntegrationTests.m */; };
143BC5A11B21E45C00462512 /* UIExplorerSnapshotTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 143BC5A01B21E45C00462512 /* UIExplorerSnapshotTests.m */; };
144D21241B2204C5006DB32B /* RCTClippingTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 144D21231B2204C5006DB32B /* RCTClippingTests.m */; };
144D21241B2204C5006DB32B /* RCTClipRectTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 144D21231B2204C5006DB32B /* RCTClipRectTests.m */; };
147CED4C1AB3532B00DA3E4C /* libRCTActionSheet.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 147CED4B1AB34F8C00DA3E4C /* libRCTActionSheet.a */; };
1497CFAC1B21F5E400C1F8F2 /* RCTAllocationTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 1497CFA41B21F5E400C1F8F2 /* RCTAllocationTests.m */; };
1497CFAD1B21F5E400C1F8F2 /* RCTBridgeTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 1497CFA51B21F5E400C1F8F2 /* RCTBridgeTests.m */; };
@@ -187,7 +187,7 @@
143BC5951B21E3E100462512 /* UIExplorerIntegrationTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = UIExplorerIntegrationTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
143BC5981B21E3E100462512 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
143BC5A01B21E45C00462512 /* UIExplorerSnapshotTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = UIExplorerSnapshotTests.m; sourceTree = "<group>"; };
144D21231B2204C5006DB32B /* RCTClippingTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RCTClippingTests.m; sourceTree = "<group>"; };
144D21231B2204C5006DB32B /* RCTClipRectTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RCTClipRectTests.m; sourceTree = "<group>"; };
1497CFA41B21F5E400C1F8F2 /* RCTAllocationTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RCTAllocationTests.m; sourceTree = "<group>"; };
1497CFA51B21F5E400C1F8F2 /* RCTBridgeTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RCTBridgeTests.m; sourceTree = "<group>"; };
1497CFA61B21F5E400C1F8F2 /* RCTContextExecutorTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RCTContextExecutorTests.m; sourceTree = "<group>"; };
@@ -353,7 +353,7 @@
1497CFA41B21F5E400C1F8F2 /* RCTAllocationTests.m */,
1497CFA51B21F5E400C1F8F2 /* RCTBridgeTests.m */,
138D6A151B53CD440074A87E /* RCTCacheTests.m */,
144D21231B2204C5006DB32B /* RCTClippingTests.m */,
144D21231B2204C5006DB32B /* RCTClipRectTests.m */,
1497CFA61B21F5E400C1F8F2 /* RCTContextExecutorTests.m */,
1497CFA71B21F5E400C1F8F2 /* RCTConvert_NSURLTests.m */,
1497CFA81B21F5E400C1F8F2 /* RCTConvert_UIFontTests.m */,
@@ -787,7 +787,7 @@
buildActionMask = 2147483647;
files = (
1497CFB01B21F5E400C1F8F2 /* RCTConvert_UIFontTests.m in Sources */,
144D21241B2204C5006DB32B /* RCTClippingTests.m in Sources */,
144D21241B2204C5006DB32B /* RCTClipRectTests.m in Sources */,
1497CFB21B21F5E400C1F8F2 /* RCTSparseArrayTests.m in Sources */,
1300627F1B59179B0043FE5A /* RCTGzipTests.m in Sources */,
1497CFAF1B21F5E400C1F8F2 /* RCTConvert_NSURLTests.m in Sources */,

View File

@@ -16,10 +16,7 @@
#import <Foundation/Foundation.h>
#import <UIKit/UIView.h>
#import <XCTest/XCTest.h>
extern CGRect RCTClipRect(CGSize contentSize, CGFloat contentScale,
CGSize targetSize, CGFloat targetScale,
UIViewContentMode resizeMode);
#import "RCTImageUtils.h"
#define RCTAssertEqualPoints(a, b) { \
XCTAssertEqual(a.x, b.x); \
@@ -36,11 +33,11 @@ RCTAssertEqualPoints(a.origin, b.origin); \
RCTAssertEqualSizes(a.size, b.size); \
}
@interface ClippingTests : XCTestCase
@interface RCTClipRectTests : XCTestCase
@end
@implementation ClippingTests
@implementation RCTClipRectTests
- (void)testLandscapeSourceLandscapeTarget
{
@@ -109,6 +106,18 @@ RCTAssertEqualSizes(a.size, b.size); \
{
CGRect expected = {{0, -37.5}, {10, 100}};
CGRect result = RCTClipRect(content, 2, target, 2, UIViewContentModeScaleAspectFill);
RCTAssertEqualRects(expected, result);
}
}
- (void)testRounding
{
CGSize content = {10, 100};
CGSize target = {20, 50};
{
CGRect expected = {{0, -38}, {10, 100}};
CGRect result = RCTClipRect(content, 1, target, 1, UIViewContentModeScaleAspectFill);
RCTAssertEqualRects(expected, result);
}
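For reference, the expected rects in these tests follow from the aspect-fill math in RCTClipRect (see the RCTImageUtils changes below). A worked sketch of the arithmetic, assuming the same 10x100 content and 20x50 target in both blocks:

// Aspect fill ("cover"): content = {10, 100}, target = {20, 50}
// The target is wider than the content, so the content is pinned to the target width:
//   width      = MIN(10, 20)      = 10
//   height     = width / (10/100) = 100
//   destHeight = width / (20/50)  = 25
//   y offset   = (25 - 100) / 2   = -37.5
// The offset is then floored onto the destination pixel grid:
//   targetScale 2: floor(-37.5 * 2) / 2 = -37.5  -> {{0, -37.5}, {10, 100}}
//   targetScale 1: floor(-37.5 * 1) / 1 = -38    -> {{0, -38},   {10, 100}}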

View File

@@ -72,62 +72,48 @@ static dispatch_queue_t RCTImageLoaderQueue(void)
completionBlock:callback];
}
//
// Why use a custom scaling method:
// http://www.mindsea.com/2012/12/downscaling-huge-alassets-without-fear-of-sigkill/
// Greater efficiency, reduced memory overhead.
+ (UIImage *)scaledImageForAssetRepresentation:(ALAssetRepresentation *)representation
size:(CGSize)size
scale:(CGFloat)scale
orientation:(UIImageOrientation)orientation
// Why use a custom scaling method? Greater efficiency, reduced memory overhead:
// http://www.mindsea.com/2012/12/downscaling-huge-alassets-without-fear-of-sigkill
static UIImage *RCTScaledImageForAsset(ALAssetRepresentation *representation,
CGSize size, CGFloat scale,
UIViewContentMode resizeMode,
NSError **error)
{
UIImage *image = nil;
NSData *data = nil;
uint8_t *buffer = (uint8_t *)malloc(sizeof(uint8_t)*(NSUInteger)[representation size]);
if (buffer != NULL) {
NSError *error = nil;
NSUInteger bytesRead = [representation getBytes:buffer fromOffset:0 length:(NSUInteger)[representation size] error:&error];
data = [NSData dataWithBytes:buffer length:bytesRead];
free(buffer);
NSUInteger length = (NSUInteger)representation.size;
NSMutableData *data = [NSMutableData dataWithLength:length];
if (![representation getBytes:data.mutableBytes
fromOffset:0
length:length
error:error]) {
return nil;
}
if ([data length]) {
CGSize sourceSize = representation.dimensions;
CGRect targetRect = RCTClipRect(sourceSize, representation.scale, size, scale, resizeMode);
CGSize targetSize = targetRect.size;
NSDictionary *options = @{
(id)kCGImageSourceShouldAllowFloat: @YES,
(id)kCGImageSourceCreateThumbnailWithTransform: @YES,
(id)kCGImageSourceCreateThumbnailFromImageAlways: @YES,
(id)kCGImageSourceThumbnailMaxPixelSize: @(MAX(targetSize.width, targetSize.height) * scale)
};
CGImageSourceRef sourceRef = CGImageSourceCreateWithData((__bridge CFDataRef)data, nil);
NSMutableDictionary *options = [NSMutableDictionary dictionary];
CGSize source = representation.dimensions;
CGFloat mW = size.width / source.width;
CGFloat mH = size.height / source.height;
if (mH > mW) {
size.width = size.height / source.height * source.width;
} else if (mW > mH) {
size.height = size.width / source.width * source.height;
}
CGFloat maxPixelSize = MAX(size.width, size.height) * scale;
[options setObject:(id)kCFBooleanTrue forKey:(id)kCGImageSourceShouldAllowFloat];
[options setObject:(id)kCFBooleanTrue forKey:(id)kCGImageSourceCreateThumbnailWithTransform];
[options setObject:(id)kCFBooleanTrue forKey:(id)kCGImageSourceCreateThumbnailFromImageAlways];
[options setObject:(id)@(maxPixelSize) forKey:(id)kCGImageSourceThumbnailMaxPixelSize];
CGImageRef imageRef = CGImageSourceCreateThumbnailAtIndex(sourceRef, 0, (__bridge CFDictionaryRef)options);
if (imageRef) {
image = [UIImage imageWithCGImage:imageRef scale:[representation scale] orientation:orientation];
CGImageRelease(imageRef);
}
if (sourceRef) {
CFRelease(sourceRef);
}
if (imageRef) {
UIImage *image = [UIImage imageWithCGImage:imageRef scale:scale
orientation:(UIImageOrientation)representation.orientation];
CGImageRelease(imageRef);
return image;
}
return image;
return nil;
}
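The helper above uses ImageIO's thumbnail API so the asset is decoded straight to the clipped size instead of decompressing the full-resolution image first, while the orientation fix itself comes from passing representation.orientation rather than a hard-coded ALAssetOrientationUp. A minimal standalone sketch of the same ImageIO technique (not part of the commit; ThumbnailFromData and its parameters are placeholder names):

#import <ImageIO/ImageIO.h>
#import <UIKit/UIKit.h>

// Sketch only: decode `data` (encoded image bytes) into a thumbnail no larger
// than `maxPixelSize` pixels on its longest side.
static UIImage *ThumbnailFromData(NSData *data, CGFloat maxPixelSize, CGFloat scale)
{
  CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFDataRef)data, NULL);
  if (!source) {
    return nil;
  }
  NSDictionary *options = @{
    (id)kCGImageSourceShouldAllowFloat: @YES,
    // Rotate/scale the thumbnail according to the image's EXIF orientation.
    (id)kCGImageSourceCreateThumbnailWithTransform: @YES,
    (id)kCGImageSourceCreateThumbnailFromImageAlways: @YES,
    (id)kCGImageSourceThumbnailMaxPixelSize: @(maxPixelSize),
  };
  CGImageRef imageRef = CGImageSourceCreateThumbnailAtIndex(source, 0, (__bridge CFDictionaryRef)options);
  CFRelease(source);
  if (!imageRef) {
    return nil;
  }
  UIImage *image = [UIImage imageWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
  CGImageRelease(imageRef);
  return image;
}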
+ (RCTImageLoaderCancellationBlock)loadImageWithTag:(NSString *)imageTag
@@ -139,7 +125,7 @@ static dispatch_queue_t RCTImageLoaderQueue(void)
completionBlock:(RCTImageLoaderCompletionBlock)completion
{
if ([imageTag hasPrefix:@"assets-library://"]) {
[[RCTImageLoader assetsLibrary] assetForURL:[NSURL URLWithString:imageTag] resultBlock:^(ALAsset *asset) {
[[self assetsLibrary] assetForURL:[NSURL URLWithString:imageTag] resultBlock:^(ALAsset *asset) {
if (asset) {
// ALAssetLibrary API is async and will be multi-threaded. Loading a few full
// resolution images at once will spike the memory up to store the image data,
@@ -151,19 +137,19 @@ static dispatch_queue_t RCTImageLoaderQueue(void)
@autoreleasepool {
BOOL useMaximumSize = CGSizeEqualToSize(size, CGSizeZero);
ALAssetOrientation orientation = ALAssetOrientationUp;
ALAssetRepresentation *representation = [asset defaultRepresentation];
UIImage *image;
NSError *error = nil;
if (useMaximumSize) {
image = [UIImage imageWithCGImage:representation.fullResolutionImage scale:scale orientation:(UIImageOrientation)orientation];
image = [UIImage imageWithCGImage:representation.fullResolutionImage
scale:scale
orientation:(UIImageOrientation)representation.orientation];
} else {
image = [self scaledImageForAssetRepresentation:representation size:size scale:scale orientation:(UIImageOrientation)orientation];
image = RCTScaledImageForAsset(representation, size, scale, resizeMode, &error);
}
RCTDispatchCallbackOnMainQueue(completion, nil, image);
RCTDispatchCallbackOnMainQueue(completion, error, image);
}
});
} else {

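For context, a hedged sketch of a call site for this loader. Only the loadImageWithTag: and completionBlock: parts of the selector appear in the hunk above, so the size:, scale: and resizeMode: labels, the completion block signature, and the asset URL are assumptions based on the names used inside the method:

// Hypothetical call (labels other than loadImageWithTag:/completionBlock: are assumed).
RCTImageLoaderCancellationBlock cancel =
  [RCTImageLoader loadImageWithTag:@"assets-library://asset/asset.JPG?id=XXXX&ext=JPG"
                              size:CGSizeMake(120, 120)
                             scale:[UIScreen mainScreen].scale
                        resizeMode:UIViewContentModeScaleAspectFill
                   completionBlock:^(NSError *error, UIImage *image) {
                     // With this change the image is clipped via RCTClipRect, carries the
                     // asset's orientation, and any error from reading the asset
                     // representation is forwarded here instead of being dropped.
                   }];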
View File

@@ -11,6 +11,24 @@
#import "RCTLog.h"
static CGFloat RCTCeilValue(CGFloat value, CGFloat scale)
{
return ceil(value * scale) / scale;
}
static CGFloat RCTFloorValue(CGFloat value, CGFloat scale)
{
return floor(value * scale) / scale;
}
static CGSize RCTCeilSize(CGSize size, CGFloat scale)
{
return (CGSize){
RCTCeilValue(size.width, scale),
RCTCeilValue(size.height, scale)
};
}
CGSize RCTTargetSizeForClipRect(CGRect clipRect)
{
return (CGSize){
@@ -48,7 +66,7 @@ CGRect RCTClipRect(CGSize sourceSize, CGFloat sourceScale,
sourceSize.width = MIN(destSize.width, sourceSize.width);
sourceSize.height = MIN(destSize.height, sourceSize.height);
return (CGRect){CGPointZero, sourceSize};
return (CGRect){CGPointZero, RCTCeilSize(sourceSize, destScale)};
case UIViewContentModeScaleAspectFit: // contain
@@ -62,7 +80,7 @@ CGRect RCTClipRect(CGSize sourceSize, CGFloat sourceScale,
sourceSize.height = destSize.height = MIN(sourceSize.height, destSize.height);
sourceSize.width = sourceSize.height * aspect;
}
return (CGRect){CGPointZero, sourceSize};
return (CGRect){CGPointZero, RCTCeilSize(sourceSize, destScale)};
case UIViewContentModeScaleAspectFill: // cover
@@ -71,20 +89,26 @@ CGRect RCTClipRect(CGSize sourceSize, CGFloat sourceScale,
sourceSize.height = destSize.height = MIN(sourceSize.height, destSize.height);
sourceSize.width = sourceSize.height * aspect;
destSize.width = destSize.height * targetAspect;
return (CGRect){{(destSize.width - sourceSize.width) / 2, 0}, sourceSize};
return (CGRect){
{RCTFloorValue((destSize.width - sourceSize.width) / 2, destScale), 0},
RCTCeilSize(sourceSize, destScale)
};
} else { // target is wider than content
sourceSize.width = destSize.width = MIN(sourceSize.width, destSize.width);
sourceSize.height = sourceSize.width / aspect;
destSize.height = destSize.width / targetAspect;
return (CGRect){{0, (destSize.height - sourceSize.height) / 2}, sourceSize};
return (CGRect){
{0, RCTFloorValue((destSize.height - sourceSize.height) / 2, destScale)},
RCTCeilSize(sourceSize, destScale)
};
}
default:
RCTLogError(@"A resizeMode value of %zd is not supported", resizeMode);
return (CGRect){CGPointZero, destSize};
return (CGRect){CGPointZero, RCTCeilSize(destSize, destScale)};
}
}
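The new helpers round sizes up and offsets down onto the pixel grid of the destination scale, so the clipped rect lands on whole pixels (or half pixels at @2x) rather than fractional coordinates. A few worked values, following directly from the definitions above:

// RCTCeilValue(10.3, 2)   == ceil(20.6) / 2 == 10.5   // half-pixel grid at @2x
// RCTCeilValue(10.3, 1)   == ceil(10.3)     == 11
// RCTFloorValue(-37.5, 1) == floor(-37.5)   == -38    // matches the testRounding expectation
// RCTCeilSize((CGSize){10.2, 99.9}, 2)      == (CGSize){10.5, 100}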