mirror of https://github.com/status-im/metro.git

Remove GlobalTransformCache.js

Reviewed By: davidaurelio
Differential Revision: D7628728
fbshipit-source-id: 6b0c54e3e84e74cf531c17b4644da382e35f3ed4

parent 084ac30863
commit df53fb3de5

@@ -1,510 +0,0 @@
/**
 * Copyright (c) 2016-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 * @format
 */

'use strict';

/* global Buffer: true */

const BatchProcessor = require('./BatchProcessor');
const FetchError = require('node-fetch/lib/fetch-error');

const crypto = require('crypto');
const fetch = require('node-fetch');
const jsonStableStringify = require('json-stable-stringify');
const path = require('path');
const throat = require('throat');

import type {
  CustomTransformOptions,
  Options as TransformWorkerOptions,
  TransformOptionsStrict,
} from '../JSTransformer/worker';
import type {LocalPath} from '../node-haste/lib/toLocalPath';
import type {CachedResult, GetTransformCacheKey} from './TransformCaching';
import type {Agent as HttpAgent} from 'http';
import type {Agent as HttpsAgent} from 'https';

type FetchOptions = {agent?: ?HttpAgent | HttpsAgent};

/**
 * The API that a global transform cache must comply with. To implement a
 * custom cache, implement this interface and pass it as an argument to the
 * application's top-level `Server` class.
 */
export type GlobalTransformCache = {
  keyOf(props: FetchProps): string,

  /**
   * Synchronously determine whether it is worth trying to fetch a result from
   * the cache. This can be used, for instance, to exclude sets of options we
   * know will never be cached.
   */
  shouldFetch(props: FetchProps): boolean,

  /**
   * Try to fetch a result. It doesn't actually need to fetch from a server;
   * the global cache could be instantiated locally, for example.
   */
  fetch(key: string): Promise<?CachedResult>,

  /**
   * Try to store a result. Callsites won't necessarily wait for the success
   * or failure of the Promise, so errors should be handled internally; they
   * may otherwise be silently ignored. The actual storage operation could be
   * done at a later point if desired. It is recommended to make this function
   * a no-op in production, and to only do the storage operation from a script
   * running on a Continuous Integration platform.
   */
  store(key: string, result: CachedResult): Promise<void>,
};
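
/**
 * Example (illustrative sketch): a minimal in-memory cache that satisfies the
 * `GlobalTransformCache` interface above, e.g. for tests or local
 * experiments. The key derivation here is deliberately naive; a production
 * cache should hash the transform options the way
 * `URIBasedGlobalTransformCache#keyOf` does further down.
 */
class InMemoryGlobalTransformCache {
  _results: Map<string, CachedResult>;

  constructor() {
    this._results = new Map();
  }

  keyOf(props: FetchProps): string {
    // Naive key: a hash of the source code plus the file's base name.
    const sourceHash = crypto
      .createHash('sha1')
      .update(props.sourceCode)
      .digest('hex');
    return `${sourceHash}-${path.basename(props.localPath)}`;
  }

  shouldFetch(props: FetchProps): boolean {
    return true;
  }

  async fetch(key: string): Promise<?CachedResult> {
    return this._results.get(key);
  }

  async store(key: string, result: CachedResult): Promise<void> {
    this._results.set(key, result);
  }
}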

type FetchResultURIs = (keys: Array<string>) => Promise<Map<string, string>>;
type FetchResultFromURI = (uri: string) => Promise<?CachedResult>;
type StoreResults = (resultsByKey: Map<string, CachedResult>) => Promise<void>;

export type FetchProps = {
  +localPath: LocalPath,
  +sourceCode: string,
  +getTransformCacheKey: GetTransformCacheKey,
  +transformOptions: TransformWorkerOptions,
};

type URI = string;

/**
 * We aggregate the requests to do a single request for many keys. It also
 * ensures we do a single request at a time to avoid putting too much pressure
 * on the I/O.
 */
class KeyURIFetcher {
  _batchProcessor: BatchProcessor<string, ?URI>;
  _fetchResultURIs: FetchResultURIs;

  /**
   * When a batch request fails for some reason, we process the error locally
   * and proceed as if there were no result for these keys instead. That way
   * a build will not fail just because of the cache.
   */
  async _processKeys(keys: Array<string>): Promise<Array<?URI>> {
    const URIsByKey = await this._fetchResultURIs(keys);
    return keys.map(key => URIsByKey.get(key));
  }

  async fetch(key: string): Promise<?string> {
    return await this._batchProcessor.queue(key);
  }

  constructor(fetchResultURIs: FetchResultURIs) {
    this._fetchResultURIs = fetchResultURIs;
    this._batchProcessor = new BatchProcessor(
      {
        maximumDelayMs: 10,
        maximumItems: 500,
        concurrency: 2,
      },
      this._processKeys.bind(this),
    );
  }
}

type KeyedResult = {key: string, result: CachedResult};

class KeyResultStore {
  _storeResults: StoreResults;
  _batchProcessor: BatchProcessor<KeyedResult, void>;
  _promises: Array<Promise<void>>;

  async _processResults(keyResults: Array<KeyedResult>): Promise<Array<void>> {
    const resultsByKey = new Map(
      keyResults.map(pair => [pair.key, pair.result]),
    );
    await this._storeResults(resultsByKey);
    return new Array(keyResults.length);
  }

  async store(key: string, result: CachedResult): Promise<void> {
    await this._batchProcessor.queue({key, result});
  }

  constructor(storeResults: StoreResults) {
    this._storeResults = storeResults;
    this._batchProcessor = new BatchProcessor(
      {
        maximumDelayMs: 1000,
        maximumItems: 100,
        concurrency: 10,
      },
      this._processResults.bind(this),
    );
    this._promises = [];
  }
}

export type TransformProfile = {
  +dev: boolean,
  +minify: boolean,
  +platform: ?string,
};

function profileKey({dev, platform}: TransformProfile): string {
  return jsonStableStringify({dev, platform});
}

/**
 * We avoid doing any request to the server if we know the server is not
 * going to have any key at all for a particular set of transform options.
 */
class TransformProfileSet {
  _profileKeys: Set<string>;

  constructor(profiles: Iterable<TransformProfile>) {
    this._profileKeys = new Set();
    for (const profile of profiles) {
      this._profileKeys.add(profileKey(profile));
    }
  }

  has(profile: TransformProfile): boolean {
    return this._profileKeys.has(profileKey(profile));
  }
}
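
/*
 * Example (illustrative sketch): with the profile set below, only dev
 * transforms for iOS would ever be looked up in the global cache; any other
 * combination of options makes `shouldFetch()` bail out immediately.
 *
 *   const profiles = new TransformProfileSet([
 *     {dev: true, minify: false, platform: 'ios'},
 *   ]);
 *   profiles.has({dev: true, minify: false, platform: 'ios'}); // true
 *   profiles.has({dev: false, minify: true, platform: 'android'}); // false
 */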

type FetchFailedDetails =
  | {
      +statusCode: number,
      +statusText: string,
      +type: 'unhandled_http_status',
      +uri: string,
    }
  | {+type: 'invalid_data'}
  | {+type: 'invalid_key_data', key: string};

class FetchFailedError extends Error {
  /** A separate object for details allows us to have a type union. */
  +details: FetchFailedDetails;

  constructor(details: FetchFailedDetails) {
    super(FetchFailedError._getMessage(details));
    (this: any).details = details;
  }

  static _getMessage(details: FetchFailedDetails): string {
    if (details.type === 'unhandled_http_status') {
      return (
        `Unexpected HTTP status: ${details.statusCode} ` +
        JSON.stringify(details.statusText) +
        ` while fetching \`${details.uri}\``
      );
    }
    if (details.type === 'invalid_key_data') {
      return `Invalid data was returned for key \`${details.key}\``;
    }
    return `Invalid or empty data was returned.`;
  }
}

/**
 * The result stored by the server for a key might not match what we expect a
 * result to be, so we need to validate the data carefully.
 */
function validateCachedResult(cachedResult: mixed): ?CachedResult {
  if (
    cachedResult != null &&
    typeof cachedResult === 'object' &&
    typeof cachedResult.code === 'string' &&
    Array.isArray(cachedResult.dependencies) &&
    cachedResult.dependencies.every(dep => typeof dep === 'string')
  ) {
    return (cachedResult: any);
  }
  return null;
}

class URIBasedGlobalTransformCache {
  _fetcher: KeyURIFetcher;
  _fetchResultFromURI: FetchResultFromURI;
  _profileSet: TransformProfileSet;
  _optionsHasher: OptionsHasher;
  _store: ?KeyResultStore;

  static FetchFailedError: Class<FetchFailedError>;

  /**
   * To use the global cache one needs some kind of central key-value store
   * that gets prefilled using keyOf() and the transformed results. The
   * fetching function should provide a mapping of keys to URIs. The files
   * referred to by these URIs contain the transform results. Using URIs
   * instead of returning the content directly allows for independent and
   * parallel fetching of each result, which may be an arbitrarily large JSON
   * blob.
   */
  constructor(props: {
    fetchResultFromURI: FetchResultFromURI,
    fetchResultURIs: FetchResultURIs,
    profiles: Iterable<TransformProfile>,
    rootPath: string,
    storeResults: StoreResults | null,
  }) {
    this._fetcher = new KeyURIFetcher(props.fetchResultURIs);
    this._profileSet = new TransformProfileSet(props.profiles);
    this._fetchResultFromURI = props.fetchResultFromURI;
    this._optionsHasher = new OptionsHasher(props.rootPath);
    if (props.storeResults != null) {
      this._store = new KeyResultStore(props.storeResults);
    }
  }

  /**
   * Return a key that uniquely identifies a source file.
   */
  keyOf(props: FetchProps) {
    const hash = crypto.createHash('sha1');
    const {sourceCode, localPath, transformOptions} = props;
    hash.update(
      this._optionsHasher.getTransformWorkerOptionsDigest(transformOptions),
    );
    const cacheKey = props.getTransformCacheKey(transformOptions);
    hash.update(JSON.stringify(cacheKey));
    hash.update(JSON.stringify(localPath));
    hash.update(
      crypto
        .createHash('sha1')
        .update(sourceCode)
        .digest('hex'),
    );
    const digest = hash.digest('hex');
    return `${digest}-${path.basename(localPath)}`;
  }

  /**
   * We may want to improve this logic to return a stream instead of the whole
   * blob of transformed results. However the results are generally only a few
   * megabytes each.
   */
  static async _fetchResultFromURI(
    uri: string,
    options: FetchOptions,
  ): Promise<CachedResult> {
    const response = await fetch(uri, {
      agent: options.agent,
      method: 'GET',
      timeout: 8000,
    });

    if (response.status !== 200) {
      throw new FetchFailedError({
        statusCode: response.status,
        statusText: response.statusText,
        type: 'unhandled_http_status',
        uri,
      });
    }
    const unvalidatedResult = await response.json();
    const result = validateCachedResult(unvalidatedResult);
    if (result == null) {
      throw new FetchFailedError({type: 'invalid_data'});
    }
    return result;
  }

  /**
   * From time to time a fetch fails; we want to retry it once when we expect
   * the failure to be transient. We might even consider waiting a little
   * before retrying if experience shows it is useful.
   */
  static _fetchResultFromURIWithRetry(
    uri: string,
    options?: FetchOptions = {},
  ): Promise<CachedResult> {
    return URIBasedGlobalTransformCache._fetchResultFromURI(uri, options).catch(
      error => {
        if (!URIBasedGlobalTransformCache.shouldRetryAfterThatError(error)) {
          throw error;
        }
        return URIBasedGlobalTransformCache._fetchResultFromURI(uri, options);
      },
    );
  }

  /**
   * The exposed version uses throat() to limit concurrency, as making too
   * many parallel requests is more likely to trigger server-side throttling
   * and cause timeouts.
   */
  static fetchResultFromURI: (
    uri: string,
    options?: FetchOptions,
  ) => Promise<CachedResult>;

  /**
   * We want to retry timeouts as they're likely temporary. We retry 503
   * (Service Unavailable) and 502 (Bad Gateway) because they may be caused by
   * some rogue server, or by throttling.
   *
   * There may be other types of error we'd want to retry for, but these are
   * the ones we experienced the most in practice.
   */
  static shouldRetryAfterThatError(error: mixed): boolean {
    return (
      (error instanceof FetchError && error.type === 'request-timeout') ||
      (error instanceof FetchFailedError &&
        error.details.type === 'unhandled_http_status' &&
        (error.details.statusCode === 503 || error.details.statusCode === 502))
    );
  }

  shouldFetch(props: FetchProps): boolean {
    return this._profileSet.has(props.transformOptions);
  }

  /**
   * This may return `null` if either the cache doesn't have a value for that
   * key yet, or an error happened, which is processed separately.
   */
  async fetch(key: string): Promise<?CachedResult> {
    const uri = await this._fetcher.fetch(key);
    if (uri == null) {
      return null;
    }
    return await this._fetchResultFromURI(uri);
  }

  async store(key: string, result: CachedResult): Promise<void> {
    if (this._store != null) {
      await this._store.store(key, result);
    }
  }
}

URIBasedGlobalTransformCache.fetchResultFromURI = throat(
  500,
  URIBasedGlobalTransformCache._fetchResultFromURIWithRetry,
);
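
/**
 * Example (illustrative sketch, using a hypothetical cache endpoint): one way
 * the URI-based cache could be wired up. `fetchResultURIs` asks a key/value
 * service where each transformed result lives; the results themselves are
 * then fetched independently through `fetchResultFromURI`, and storing is
 * disabled so the cache is read-only outside of CI.
 */
function createExampleGlobalCache(
  rootPath: string,
): URIBasedGlobalTransformCache {
  return new URIBasedGlobalTransformCache({
    fetchResultFromURI: URIBasedGlobalTransformCache.fetchResultFromURI,
    fetchResultURIs: async (keys: Array<string>) => {
      // Hypothetical endpoint that maps keys to result URIs as a JSON object.
      const response = await fetch(
        'https://cache.example.com/uris?keys=' +
          encodeURIComponent(keys.join(',')),
      );
      const urisByKey = await response.json();
      return new Map(
        keys
          .filter(key => urisByKey[key] != null)
          .map(key => [key, urisByKey[key]]),
      );
    },
    profiles: [{dev: true, minify: false, platform: 'ios'}],
    rootPath,
    storeResults: null,
  });
}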

class OptionsHasher {
  _rootPath: string;
  _cache: WeakMap<TransformWorkerOptions, string>;

  constructor(rootPath: string) {
    this._rootPath = rootPath;
    this._cache = new WeakMap();
  }

  getTransformWorkerOptionsDigest(options: TransformWorkerOptions): string {
    const digest = this._cache.get(options);
    if (digest != null) {
      return digest;
    }
    const hash = crypto.createHash('sha1');
    this.hashTransformWorkerOptions(hash, options);
    const newDigest = hash.digest('hex');
    this._cache.set(options, newDigest);
    return newDigest;
  }

  /**
   * This function is extra-conservative with how it hashes the transform
   * options. In particular:
   *
   * * we need to hash paths as local paths, i.e. relative to the root, not
   *   as absolute paths, otherwise everyone would have a different cache,
   *   defeating the purpose of the global cache;
   * * we need to reject any additional field we do not know of, because it
   *   could contain absolute paths that we would fail to normalize.
   *
   * Theoretically, Flow could help us prevent any other field from being here
   * by using an *exact* object type. In practice, the transform options are a
   * mix of many different fields including the optional Babel fields, and
   * some serious cleanup will be necessary to enable rock-solid typing.
   */
  hashTransformWorkerOptions(
    hash: crypto$Hash,
    transform: TransformWorkerOptions,
  ): crypto$Hash {
    return this.hashTransformOptions(hash, transform);
  }

  /**
   * The transform options contain absolute paths. These can contain, for
   * example, the username if someone works in their home directory (very
   * likely). We get rid of this local data for the global cache, otherwise
   * nobody would share the same cache keys. The project roots should not be
   * needed as part of the cache key, as they should not affect the
   * transformation of a single particular file.
   */
  hashTransformOptions(
    hash: crypto$Hash,
    options: TransformOptionsStrict,
  ): crypto$Hash {
    const {
      assetDataPlugins,
      customTransformOptions,
      enableBabelRCLookup,
      dev,
      hot,
      inlineRequires,
      minify,
      platform,
      projectRoot,
      ...unknowns
    } = options;
    const unknownKeys = Object.keys(unknowns);
    if (unknownKeys.length > 0) {
      const message = `these transform option fields are unknown: ${JSON.stringify(
        unknownKeys,
      )}`;
      throw new CannotHashOptionsError(message);
    }

    /* eslint-disable no-bitwise */
    hash.update(
      new Buffer([
        +dev |
          (+hot << 2) |
          (+inlineRequires << 3) |
          (+enableBabelRCLookup << 4) |
          (+minify << 5),
      ]),
    );

    /* eslint-enable no-bitwise */
    hash.update(JSON.stringify(assetDataPlugins));
    hash.update(JSON.stringify(platform));
    hash.update(JSON.stringify(this.toLocalPath(projectRoot)));
    hash.update(
      JSON.stringify(this.sortTransformOptions(customTransformOptions || {})),
    );

    return hash;
  }

  pathsToLocal(filePaths: Array<string>): Array<string> {
    return filePaths.map(this.toLocalPath, this);
  }

  toLocalPath(filePath: string): string {
    return path.relative(this._rootPath, filePath);
  }

  sortTransformOptions(
    options: CustomTransformOptions,
  ): Array<[string, mixed]> {
    return Object.keys(options)
      .sort()
      .map(key => [key, options[key]]);
  }
}

class CannotHashOptionsError extends Error {
  constructor(message: string) {
    super();
    this.message = message;
  }
}

URIBasedGlobalTransformCache.FetchFailedError = FetchFailedError;

module.exports = {URIBasedGlobalTransformCache, CannotHashOptionsError};

@@ -1,102 +0,0 @@
/**
 * Copyright (c) 2016-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @format
 * @emails oncall+js_foundation
 */

'use strict';

jest.useRealTimers();

const mockFetch = jest.fn();
jest.mock('node-fetch', () => mockFetch);

const {URIBasedGlobalTransformCache} = require('../GlobalTransformCache');
const FetchError = require('node-fetch/lib/fetch-error');

const getTransformOptions = require('../../__fixtures__/getTransformOptions');

async function fetchResultURIs(
  keys: Array<string>,
): Promise<Map<string, string>> {
  return new Map(keys.map(key => [key, `http://globalcache.com/${key}`]));
}

async function fetchResultFromURI(uri: string): Promise<?CachedResult> {
  return {
    code: `/* code from ${uri} */`,
    dependencies: [],
    dependencyOffsets: [],
  };
}

describe('GlobalTransformCache', () => {
  it('fetches results', async () => {
    const cache = new URIBasedGlobalTransformCache({
      fetchResultFromURI,
      fetchResultURIs,
      profiles: [{dev: true, minify: false, platform: 'ios'}],
      rootPath: '/root',
      storeResults: null,
    });
    const transformOptions = await getTransformOptions();

    const result = await Promise.all([
      cache.fetch(
        cache.keyOf({
          localPath: 'some/where/foo.js',
          sourceCode: '/* beep */',
          getTransformCacheKey: () => 'abcd',
          transformOptions,
        }),
      ),
      cache.fetch(
        cache.keyOf({
          localPath: 'some/where/else/bar.js',
          sourceCode: '/* boop */',
          getTransformCacheKey: () => 'abcd',
          transformOptions,
        }),
      ),
    ]);
    expect(result).toMatchSnapshot();
  });

  describe('fetchResultFromURI', () => {
    const defaultFetchMockImpl = async uri => ({
      status: 200,
      json: async () => ({
        code: `/* code from ${uri} */`,
        dependencies: [],
        dependencyOffsets: [],
      }),
    });

    beforeEach(() => {
      mockFetch.mockReset();
    });

    it('fetches result', async () => {
      mockFetch.mockImplementation(defaultFetchMockImpl);
      const result = await URIBasedGlobalTransformCache.fetchResultFromURI(
        'http://globalcache.com/foo',
      );
      expect(result).toMatchSnapshot();
    });

    it('retries once on timeout', async () => {
      mockFetch.mockImplementation(async uri => {
        mockFetch.mockImplementation(defaultFetchMockImpl);
        throw new FetchError('timeout!', 'request-timeout');
      });
      const result = await URIBasedGlobalTransformCache.fetchResultFromURI(
        'http://globalcache.com/foo',
      );
      expect(result).toMatchSnapshot();
    });
  });
});

@@ -1,32 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`GlobalTransformCache fetchResultFromURI fetches result 1`] = `
Object {
  "code": "/* code from http://globalcache.com/foo */",
  "dependencies": Array [],
  "dependencyOffsets": Array [],
}
`;

exports[`GlobalTransformCache fetchResultFromURI retries once on timeout 1`] = `
Object {
  "code": "/* code from http://globalcache.com/foo */",
  "dependencies": Array [],
  "dependencyOffsets": Array [],
}
`;

exports[`GlobalTransformCache fetches results 1`] = `
Array [
  Object {
    "code": "/* code from http://globalcache.com/3b3b861b6b80dd038c51262ca8b8b9f76e353ada-foo.js */",
    "dependencies": Array [],
    "dependencyOffsets": Array [],
  },
  Object {
    "code": "/* code from http://globalcache.com/087b5bc3467f9a0670e49ce9118e6f6791aa12c6-bar.js */",
    "dependencies": Array [],
    "dependencyOffsets": Array [],
  },
]
`;