mirror of https://github.com/status-im/metro.git
packager: BatchProcessor: use Promise for processBatch()
Reviewed By: cpojer
Differential Revision: D4572495
fbshipit-source-id: 4a18b6ae16ea588104c337f2085707be07609005

parent 46d5c2f517, commit 243d0694ff
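
In short, processBatch no longer takes a node-style callback receiving (error, orderedResults); it returns a Promise that resolves to one result per queued item, in order. A minimal sketch of the new contract, assuming the module is required from a local path (the path and option values are illustrative, and Flow annotations are omitted):

    const BatchProcessor = require('./BatchProcessor'); // illustrative path

    // Values are placeholders; only the option names come from the module.
    const options = {maximumDelayMs: 500, maximumItems: 3, concurrency: 2};

    // processBatch now returns a Promise of one result per batch item, in order.
    const bp = new BatchProcessor(options, batch =>
      Promise.resolve(batch.map(item => item * 10)),
    );

    // queue() still hands back a Promise for the individual item's result.
    bp.queue(4).then(result => console.log(result)); // logs 40 once the batch flushes
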
BatchProcessor (implementation):

@@ -13,10 +13,7 @@
 const invariant = require('fbjs/lib/invariant');
 
-type ProcessBatch<TItem, TResult> = (
-  batch: Array<TItem>,
-  callback: (error?: Error, orderedResults?: Array<TResult>) => mixed,
-) => mixed;
+type ProcessBatch<TItem, TResult> = (batch: Array<TItem>) => Promise<Array<TResult>>;
 
 type BatchProcessorOptions = {
   maximumDelayMs: number,

@@ -45,10 +42,7 @@ class BatchProcessor<TItem, TResult> {
   _queue: Array<QueueItem<TItem, TResult>>;
   _timeoutHandle: ?number;
 
-  constructor(
-    options: BatchProcessorOptions,
-    processBatch: ProcessBatch<TItem, TResult>,
-  ) {
+  constructor(options: BatchProcessorOptions, processBatch: ProcessBatch<TItem, TResult>) {
     this._options = options;
     this._processBatch = processBatch;
     this._queue = [];

@@ -57,30 +51,36 @@ class BatchProcessor<TItem, TResult> {
     (this: any)._processQueue = this._processQueue.bind(this);
   }
 
+  _onBatchFinished() {
+    this._currentProcessCount--;
+    this._processQueueOnceReady();
+  }
+
+  _onBatchResults(jobs: Array<QueueItem<TItem, TResult>>, results: Array<TResult>) {
+    invariant(results.length === jobs.length, 'Not enough results returned.');
+    for (let i = 0; i < jobs.length; ++i) {
+      jobs[i].resolve(results[i]);
+    }
+    this._onBatchFinished();
+  }
+
+  _onBatchError(jobs: Array<QueueItem<TItem, TResult>>, error: mixed) {
+    for (let i = 0; i < jobs.length; ++i) {
+      jobs[i].reject(error);
+    }
+    this._onBatchFinished();
+  }
+
   _processQueue() {
     this._timeoutHandle = null;
-    while (
-      this._queue.length > 0 &&
-      this._currentProcessCount < this._options.concurrency
-    ) {
+    const {concurrency} = this._options;
+    while (this._queue.length > 0 && this._currentProcessCount < concurrency) {
       this._currentProcessCount++;
       const jobs = this._queue.splice(0, this._options.maximumItems);
-      const items = jobs.map(job => job.item);
-      this._processBatch(items, (error, results) => {
-        if (error != null) {
-          for (let i = 0; i < jobs.length; ++i) {
-            jobs[i].reject(error);
-          }
-        } else {
-          invariant(results != null, 'Neither results or error were returned.');
-          invariant(results.length === items.length, 'Not enough results returned.');
-          for (let i = 0; i < jobs.length; ++i) {
-            jobs[i].resolve(results[i]);
-          }
-        }
-        this._currentProcessCount--;
-        this._processQueueOnceReady();
-      });
+      this._processBatch(jobs.map(job => job.item)).then(
+        this._onBatchResults.bind(this, jobs),
+        this._onBatchError.bind(this, jobs),
+      );
     }
   }
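
The rewritten _processQueue no longer threads a callback through a closure: it binds the batch's jobs to a pair of handlers and lets the returned Promise route to one of them. Below is a standalone sketch of that dispatch pattern; the names mirror the diff, but the code is illustrative rather than the module's API:

    // Each queued item carries its own deferred promise, mirroring QueueItem.
    function makeJob(item) {
      let resolve, reject;
      const promise = new Promise((res, rej) => { resolve = res; reject = rej; });
      return {item, promise, resolve, reject};
    }

    function onBatchResults(jobs, results) {
      // One ordered result per job -- the invariant _onBatchResults asserts.
      jobs.forEach((job, i) => job.resolve(results[i]));
    }

    function onBatchError(jobs, error) {
      jobs.forEach(job => job.reject(error));
    }

    const jobs = [makeJob('a'), makeJob('b')];
    const processBatch = items => Promise.resolve(items.map(s => s.toUpperCase()));

    // Same shape as the new _processQueue: bind the jobs up front, then let the
    // Promise pick the success or failure handler.
    processBatch(jobs.map(job => job.item)).then(
      onBatchResults.bind(null, jobs),
      onBatchError.bind(null, jobs),
    );

    jobs[0].promise.then(value => console.log(value)); // 'A'
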
Global transform cache (KeyURIFetcher / KeyResultStore):

@@ -23,15 +23,8 @@ import type {Options as TransformOptions} from '../JSTransformer/worker/worker';
 import type {CachedResult, GetTransformCacheKey} from './TransformCache';
 import type {Reporter} from './reporting';
 
-type FetchResultURIs = (
-  keys: Array<string>,
-  callback: (error?: Error, results?: Map<string, string>) => void,
-) => mixed;
-
-type StoreResults = (
-  resultsByKey: Map<string, CachedResult>,
-  callback: (error?: Error) => void,
-) => mixed;
+type FetchResultURIs = (keys: Array<string>) => Promise<Map<string, string>>;
+type StoreResults = (resultsByKey: Map<string, CachedResult>) => Promise<void>;
 
 type FetchProps = {
   filePath: string,
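
FetchResultURIs and StoreResults now follow the same convention: no trailing callback, just a returned Promise. A hypothetical in-memory pair conforming to the new shapes (Flow types elided; the Map-backed "remote" and the URI scheme are made up for illustration):

    const remote = new Map(); // stand-in for the real cache service

    // (keys: Array<string>) => Promise<Map<string, string>>
    const fetchResultURIs = keys =>
      Promise.resolve(new Map(keys.filter(k => remote.has(k)).map(k => [k, remote.get(k)])));

    // (resultsByKey: Map<string, CachedResult>) => Promise<void>
    const storeResults = resultsByKey => {
      for (const key of resultsByKey.keys()) {
        remote.set(key, 'uri://cache/' + key); // made-up URI scheme
      }
      return Promise.resolve();
    };
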

@@ -60,17 +53,15 @@ class KeyURIFetcher {
    * and we proceed as if there were no result for these keys instead. That way
    * a build will not fail just because of the cache.
    */
-  _processKeys(
-    keys: Array<string>,
-    callback: (error?: Error, keyURIs: Array<?URI>) => mixed,
-  ) {
-    this._fetchResultURIs(keys, (error, URIsByKey) => {
-      if (error != null) {
-        this._processError(error);
-      }
-      const URIs = keys.map(key => URIsByKey && URIsByKey.get(key));
-      callback(undefined, URIs);
-    });
+  async _processKeys(keys: Array<string>): Promise<Array<?URI>> {
+    let URIsByKey;
+    try {
+      URIsByKey = await this._fetchResultURIs(keys);
+    } catch (error) {
+      this._processError(error);
+      return new Array(keys.length);
+    }
+    return keys.map(key => URIsByKey.get(key));
   }
 
   fetch(key: string, callback: FetchURICallback) {
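
The async _processKeys keeps the behaviour promised by the doc comment above it: a failed fetch is reported and then treated as a miss for every key, so the build carries on. A reduced, standalone sketch of that degrade-to-undefined pattern (function and parameter names are illustrative):

    async function processKeys(keys, fetchResultURIs, reportError) {
      let URIsByKey;
      try {
        URIsByKey = await fetchResultURIs(keys);
      } catch (error) {
        reportError(error);
        // new Array(n) is n holes that read as undefined: one "miss" per key.
        return new Array(keys.length);
      }
      return keys.map(key => URIsByKey.get(key));
    }

    // A fetcher that always fails exercises the error path.
    const failingFetch = () => Promise.reject(new Error('cache unreachable'));
    processKeys(['a', 'b'], failingFetch, console.error)
      .then(uris => console.log(uris.length, uris[0])); // 2 undefined
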

@@ -92,21 +83,17 @@ class KeyURIFetcher {
 
 }
 
+type KeyedResult = {key: string, result: CachedResult};
+
 class KeyResultStore {
 
   _storeResults: StoreResults;
-  _batchProcessor: BatchProcessor<{key: string, result: CachedResult}, void>;
+  _batchProcessor: BatchProcessor<KeyedResult, void>;
 
-  _processResults(
-    keyResults: Array<{key: string, result: CachedResult}>,
-    callback: (error?: Error) => mixed,
-  ) {
-    const resultsByKey = new Map(
-      keyResults.map(pair => [pair.key, pair.result]),
-    );
-    this._storeResults(resultsByKey, error => {
-      callback(error);
-    });
+  async _processResults(keyResults: Array<KeyedResult>): Promise<Array<void>> {
+    const resultsByKey = new Map(keyResults.map(pair => [pair.key, pair.result]));
+    await this._storeResults(resultsByKey);
+    return new Array(keyResults.length);
   }
 
   store(key: string, result: CachedResult) {
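
BatchProcessor still requires exactly one result per queued item (the length invariant in _onBatchResults), even when the result type is void, which is why _processResults resolves to new Array(keyResults.length) after a single storeResults call. A sketch of wiring such a store function into a BatchProcessor of keyed results; the require path, option values, and storeResults body are placeholders:

    const BatchProcessor = require('./BatchProcessor'); // illustrative path

    // Stand-in for the injected StoreResults function.
    const storeResults = resultsByKey =>
      Promise.resolve(console.log('stored', resultsByKey.size, 'results'));

    const batchProcessor = new BatchProcessor(
      {maximumDelayMs: 1000, maximumItems: 100, concurrency: 10}, // placeholder values
      async keyResults => {
        const resultsByKey = new Map(keyResults.map(pair => [pair.key, pair.result]));
        await storeResults(resultsByKey);
        // One (void) slot per queued item keeps the length invariant satisfied.
        return new Array(keyResults.length);
      },
    );

    batchProcessor.queue({key: 'abc', result: {/* CachedResult payload */}});
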
BatchProcessor tests:

@@ -9,7 +9,9 @@
 
 'use strict';
 
-jest.dontMock('../BatchProcessor');
+jest
+  .useRealTimers()
+  .dontMock('../BatchProcessor');
 
 const BatchProcessor = require('../BatchProcessor');
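
Presumably the reason for .useRealTimers(): with the callback API, the old tests could drive everything via jest.runAllTimers() and jest.runAllTicks(), but Promise callbacks are microtasks that this flushing does not reliably drain, so the tests now run with real timers and simply await the queued promises. A minimal illustration of the awaited style (the test name is made up):

    it('resolves without manual timer flushing', async () => {
      const value = await new Promise(resolve => setTimeout(() => resolve(42), 0));
      expect(value).toBe(42);
    });
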

@@ -21,29 +23,27 @@ describe('BatchProcessor', () => {
     concurrency: 2,
   };
 
-  it('aggregate items concurrently', () => {
+  it('aggregate items concurrently', async () => {
     const input = [...Array(9).keys()].slice(1);
     const transform = e => e * 10;
     const batches = [];
     let concurrency = 0;
     let maxConcurrency = 0;
-    const bp = new BatchProcessor(options, (items, callback) => {
+    const bp = new BatchProcessor(options, (items) => new Promise(resolve => {
       ++concurrency;
       expect(concurrency).toBeLessThanOrEqual(options.concurrency);
       maxConcurrency = Math.max(maxConcurrency, concurrency);
       batches.push(items);
       setTimeout(() => {
-        callback(null, items.map(transform));
+        resolve(items.map(transform));
         --concurrency;
       }, 0);
-    });
+    }));
     const results = [];
-    input.forEach(e => bp.queue(e).then(
+    await Promise.all(input.map(e => bp.queue(e).then(
       res => results.push(res),
       error => process.nextTick(() => { throw error; }),
-    ));
-    jest.runAllTimers();
-    jest.runAllTicks();
+    )));
     expect(batches).toEqual([
       [1, 2, 3],
       [4, 5, 6],

@@ -53,17 +53,15 @@ describe('BatchProcessor', () => {
     expect(results).toEqual(input.map(transform));
   });
 
-  it('report errors', () => {
+  it('report errors', async () => {
     const error = new Error('oh noes');
-    const bp = new BatchProcessor(options, (items, callback) => {
-      setTimeout(callback.bind(null, error), 0);
-    });
+    const bp = new BatchProcessor(options, (items) => new Promise((_, reject) => {
+      setTimeout(reject.bind(null, error), 0);
+    }));
     let receivedError;
-    bp.queue('foo').catch(
+    await bp.queue('foo').catch(
       err => { receivedError = err; },
     );
-    jest.runAllTimers();
-    jest.runAllTicks();
     expect(receivedError).toBe(error);
   });