packager: move error handling from Module to GlobalTransformCache

Reviewed By: davidaurelio

Differential Revision: D4449685

fbshipit-source-id: 4f57cfe132036f476e36933bd2ffcb9f23c42ccc
Jean Lauliac, 2017-01-24 03:34:27 -08:00, committed by Facebook Github Bot
parent 3a98e93069
commit 83ed3acc58
3 changed files with 58 additions and 27 deletions

GlobalTransformCache.js

@@ -20,6 +20,7 @@ const request = require('request');
 
 import type {Options as TransformOptions} from '../JSTransformer/worker/worker';
 import type {CachedResult} from './TransformCache';
+import type {Reporter} from './reporting';
 
 type FetchResultURIs = (
   keys: Array<string>,
@@ -38,7 +39,7 @@ type FetchProps = {
   transformOptions: TransformOptions,
 };
 
-type FetchCallback = (error?: Error, resultURI?: ?CachedResult) => mixed;
+type FetchCallback = (error?: Error, result?: ?CachedResult) => mixed;
 type FetchURICallback = (error?: Error, resultURI?: ?string) => mixed;
 
 type ProcessBatch<TItem, TResult> = (
@@ -135,16 +136,25 @@ type URI = string;
  */
 class KeyURIFetcher {
 
-  _fetchResultURIs: FetchResultURIs;
   _batchProcessor: BatchProcessor<string, ?URI>;
+  _fetchResultURIs: FetchResultURIs;
+  _processError: (error: Error) => mixed;
 
+  /**
+   * When a batch request fails for some reason, we process the error locally
+   * and we proceed as if there were no result for these keys instead. That way
+   * a build will not fail just because of the cache.
+   */
   _processKeys(
     keys: Array<string>,
     callback: (error?: Error, keyURIs: Array<?URI>) => mixed,
   ) {
     this._fetchResultURIs(keys, (error, URIsByKey) => {
+      if (error != null) {
+        this._processError(error);
+      }
       const URIs = keys.map(key => URIsByKey && URIsByKey.get(key));
-      callback(error, URIs);
+      callback(undefined, URIs);
     });
   }
 
@@ -152,13 +162,14 @@ class KeyURIFetcher {
     this._batchProcessor.queue(key, callback);
   }
 
-  constructor(fetchResultURIs: FetchResultURIs) {
+  constructor(fetchResultURIs: FetchResultURIs, processError: (error: Error) => mixed) {
     this._fetchResultURIs = fetchResultURIs;
     this._batchProcessor = new BatchProcessor({
       maximumDelayMs: 10,
       maximumItems: 500,
       concurrency: 25,
     }, this._processKeys.bind(this));
+    this._processError = processError;
   }
 
 }
@@ -260,8 +271,24 @@ class TransformProfileSet {
 class GlobalTransformCache {
 
   _fetcher: KeyURIFetcher;
-  _store: ?KeyResultStore;
   _profileSet: TransformProfileSet;
+  _reporter: Reporter;
+  _retries: number;
+  _store: ?KeyResultStore;
+
+  /**
+   * If too many errors already happened, we just drop the additional errors.
+   */
+  _processError(error: Error) {
+    if (this._retries <= 0) {
+      return;
+    }
+    this._reporter.update({type: 'global_cache_error', error});
+    --this._retries;
+    if (this._retries <= 0) {
+      this._reporter.update({type: 'global_cache_disabled', reason: 'too_many_errors'});
+    }
+  }
 
   /**
    * For using the global cache one needs to have some kind of central key-value
@@ -275,9 +302,12 @@ class GlobalTransformCache {
     fetchResultURIs: FetchResultURIs,
     storeResults: ?StoreResults,
     profiles: Iterable<TransformProfile>,
+    reporter: Reporter,
   ) {
+    this._fetcher = new KeyURIFetcher(fetchResultURIs, this._processError.bind(this));
     this._profileSet = new TransformProfileSet(profiles);
-    this._fetcher = new KeyURIFetcher(fetchResultURIs);
+    this._reporter = reporter;
+    this._retries = 4;
     if (storeResults != null) {
       this._store = new KeyResultStore(storeResults);
     }
@@ -322,8 +352,22 @@ class GlobalTransformCache {
     });
   }
 
+  /**
+   * Wrap `_fetchFromURI` with error logging, and return an empty result instead
+   * of errors. This is because the global cache is not critical to the normal
+   * packager operation.
+   */
+  _tryFetchingFromURI(uri: string, callback: FetchCallback) {
+    this._fetchFromURI(uri, (error, result) => {
+      if (error != null) {
+        this._processError(error);
+      }
+      callback(undefined, result);
+    });
+  }
+
   fetch(props: FetchProps, callback: FetchCallback) {
-    if (!this._profileSet.has(props.transformOptions)) {
+    if (this._retries <= 0 || !this._profileSet.has(props.transformOptions)) {
       process.nextTick(callback);
       return;
     }
@@ -335,7 +379,7 @@ class GlobalTransformCache {
           callback();
           return;
         }
-        this._fetchFromURI(uri, callback);
+        this._tryFetchingFromURI(uri, callback);
       }
     });
   }

TerminalReporter.js

@@ -152,7 +152,8 @@ class TerminalReporter {
         terminal.log(`${DEP_GRAPH_MESSAGE}, done.`);
         break;
       case 'global_cache_error':
-        reporting.logWarning(terminal, 'The global cache failed: %s', event.error.stack);
+        const message = JSON.stringify(event.error.message);
+        reporting.logWarning(terminal, 'the global cache failed: %s', message);
         break;
       case 'global_cache_disabled':
         this._logCacheDisabled(event.reason);
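
The TerminalReporter change above swaps the full stack trace for JSON.stringify(event.error.message), which keeps each cache failure to a single escaped warning line instead of a multi-line dump. A tiny illustration, with a hypothetical error text not taken from the diff:

// JSON.stringify escapes embedded newlines, so even a multi-line message
// renders as one warning line in the terminal.
const error = new Error('connect ECONNREFUSED 127.0.0.1:8080\nwhile fetching transformed code');
console.log(JSON.stringify(error.message));
// -> "connect ECONNREFUSED 127.0.0.1:8080\nwhile fetching transformed code"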

Module.js

@@ -80,8 +80,6 @@ class Module {
   _readSourceCodePromise: Promise<string>;
   _readPromises: Map<string, Promise<ReadResult>>;
 
-  static _globalCacheRetries: number;
-
   constructor({
     cache,
     depGraphHelpers,
@@ -268,24 +266,14 @@ class Module {
     callback: (error: ?Error, result: ?TransformedCode) => void,
   ) {
     const {_globalCache} = this;
-    const noMoreRetries = Module._globalCacheRetries <= 0;
-    if (_globalCache == null || noMoreRetries) {
+    if (_globalCache == null) {
       this._transformCodeForCallback(cacheProps, callback);
       return;
     }
     _globalCache.fetch(cacheProps, (globalCacheError, globalCachedResult) => {
-      if (globalCacheError != null && Module._globalCacheRetries > 0) {
-        this._reporter.update({
-          type: 'global_cache_error',
-          error: globalCacheError,
-        });
-        Module._globalCacheRetries--;
-        if (Module._globalCacheRetries <= 0) {
-          this._reporter.update({
-            type: 'global_cache_disabled',
-            reason: 'too_many_errors',
-          });
-        }
+      if (globalCacheError) {
+        callback(globalCacheError);
+        return;
       }
       if (globalCachedResult == null) {
         this._transformAndStoreCodeGlobally(cacheProps, _globalCache, callback);
@@ -379,8 +367,6 @@ class Module {
   }
 }
 
-Module._globalCacheRetries = 4;
-
 // use weak map to speed up hash creation of known objects
 const knownHashes = new WeakMap();
 function stableObjectHash(object) {
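
With the static retry counter removed, Module's global-cache path reduces to three branches: no global cache configured, transform locally; the cache hands back an error, forward it to the callback (GlobalTransformCache now swallows and reports its own fetch errors, so this is a last resort); no cached result, transform and store globally. Below is a condensed sketch of that flow, where transformLocally and transformAndStoreGlobally are hypothetical stand-ins for Module's private helpers, not names from the diff.

// Hypothetical stubs so the sketch runs on its own.
const transformLocally = (props, cb) => cb(undefined, {code: '/* transformed locally */'});
const transformAndStoreGlobally = (props, cache, cb) => transformLocally(props, cb);

// Condensed version of Module's global-cache branch after this change.
function fetchTransformedCode(globalCache, cacheProps, callback) {
  if (globalCache == null) {
    transformLocally(cacheProps, callback);
    return;
  }
  globalCache.fetch(cacheProps, (error, cachedResult) => {
    if (error) {
      callback(error); // error reporting/throttling already happened inside the cache
      return;
    }
    if (cachedResult == null) {
      transformAndStoreGlobally(cacheProps, globalCache, callback);
      return;
    }
    callback(undefined, cachedResult); // cache hit
  });
}

// Example with no global cache: falls straight through to the local transform.
fetchTransformedCode(null, {}, (err, result) => console.log(result.code));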