JSTransformer/worker: cache transformed code as separate files

Reviewed By: davidaurelio, cpojer

Differential Revision: D4051477

fbshipit-source-id: 0179ce18dd20c00083ae05b5cf5f925659e7c056
Jean Lauliac 2016-11-08 03:30:24 -08:00 committed by Facebook Github Bot
parent 6212c223ec
commit c18d46bd03
14 changed files with 835 additions and 197 deletions
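
Editor's note (a sketch for orientation, not part of the commit): the core of this change is that the transform worker no longer returns transformed code through its callback. The Bundler derives a single transformCacheKey, threads it through the Transformer into the worker; the worker writes the full result to a new on-disk TransformCache, and only the start/end log entries travel back, after which Module.read() fetches the fresh result from the cache. Below is a minimal sketch of the two cache calls, using the API added further down; the file path, options, and key parts are illustrative.

// Sketch only: how a worker-side write and a Module-side read pair up.
const TransformCache = require('./react-packager/src/lib/TransformCache');

const transformCacheKey = ['cacheVersion', 'projectRoots', 'transformerPath'].join('$');
const filePath = '/project/index.js';
const sourceCode = 'arbitrary(code);';
const transformOptions = {dev: true};

// Worker side: persist the result instead of sending it back to the caller.
TransformCache.writeSync({
  filePath,
  sourceCode,
  transformCacheKey,
  transformOptions,
  result: {code: 'transformed(code);', dependencies: [], dependencyOffsets: [], map: null},
});

// Module side: read the freshly written result back.
const cached = TransformCache.readSync({
  filePath,
  sourceCode,
  transformCacheKey,
  transformOptions,
  cacheOptions: {resetCache: false},
});
// cached.code === 'transformed(code);'
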

View File

@ -134,9 +134,10 @@ class Bundler {
}
}
const transformCacheKey = cacheKeyParts.join('$');
this._cache = new Cache({
resetCache: opts.resetCache,
cacheKey: cacheKeyParts.join('$'),
cacheKey: transformCacheKey,
});
this._transformer = new Transformer({
@ -157,7 +158,8 @@ class Bundler {
resetCache: opts.resetCache,
transformCode:
(module, code, options) =>
this._transformer.transformFile(module.path, code, options),
this._transformer.transformFile(module.path, code, options, transformCacheKey),
transformCacheKey,
});
this._projectRoots = opts.projectRoots;

View File

@ -9,6 +9,7 @@
'use strict';
jest
.unmock('imurmurhash')
.unmock('../../lib/ModuleTransport')
.unmock('../');
@ -26,6 +27,7 @@ const {any} = jasmine;
describe('Transformer', function() {
let options, workers, Cache;
const fileName = '/an/arbitrary/file.js';
const transformCacheKey = 'abcdef';
const transformModulePath = __filename;
beforeEach(function() {
@ -45,45 +47,37 @@ describe('Transformer', function() {
it('passes transform module path, file path, source code, and options to the worker farm when transforming', () => {
const transformOptions = {arbitrary: 'options'};
const code = 'arbitrary(code)';
new Transformer(options).transformFile(fileName, code, transformOptions);
new Transformer(options).transformFile(fileName, code, transformOptions, transformCacheKey);
expect(workers.transformAndExtractDependencies).toBeCalledWith(
transformModulePath,
fileName,
code,
transformOptions,
transformCacheKey,
any(Function),
);
});
pit('passes the data produced by the worker back', () => {
const transformer = new Transformer(options);
const result = { code: 'transformed', map: 'sourceMap' };
workers.transformAndExtractDependencies.mockImpl(function(transformPath, filename, code, options, callback) {
callback(null, result);
});
return transformer.transformFile(fileName, '', {})
.then(data => expect(data).toBe(result));
});
pit('should add file info to parse errors', function() {
const transformer = new Transformer(options);
var message = 'message';
var snippet = 'snippet';
workers.transformAndExtractDependencies.mockImpl(function(transformPath, filename, code, options, callback) {
var babelError = new SyntaxError(message);
babelError.type = 'SyntaxError';
babelError.description = message;
babelError.loc = {
line: 2,
column: 15,
};
babelError.codeFrame = snippet;
callback(babelError);
});
workers.transformAndExtractDependencies.mockImpl(
function(transformPath, filename, code, options, transformCacheKey, callback) {
var babelError = new SyntaxError(message);
babelError.type = 'SyntaxError';
babelError.description = message;
babelError.loc = {
line: 2,
column: 15,
};
babelError.codeFrame = snippet;
callback(babelError);
},
);
return transformer.transformFile(fileName, '', {})
return transformer.transformFile(fileName, '', {}, transformCacheKey)
.catch(function(error) {
expect(error.type).toEqual('TransformError');
expect(error.message).toBe('SyntaxError ' + message);

View File

@ -108,18 +108,17 @@ class Transformer {
this._workers && workerFarm.end(this._workers);
}
transformFile(fileName, code, options) {
transformFile(fileName, code, options, transformCacheKey) {
if (!this._transform) {
return Promise.reject(new Error('No transform module'));
}
debug('transforming file', fileName);
return this
._transform(this._transformModulePath, fileName, code, options)
.then(result => {
Logger.log(result.transformFileStartLogEntry);
Logger.log(result.transformFileEndLogEntry);
._transform(this._transformModulePath, fileName, code, options, transformCacheKey)
.then(stats => {
Logger.log(stats.transformFileStartLogEntry);
Logger.log(stats.transformFileEndLogEntry);
debug('done transforming file', fileName);
return result;
})
.catch(error => {
if (error.type === 'TimeoutError') {

View File

@ -13,11 +13,13 @@ jest.mock('../constant-folding');
jest.mock('../extract-dependencies');
jest.mock('../inline');
jest.mock('../minify');
jest.mock('../../../lib/TransformCache');
const {any, objectContaining} = jasmine;
describe('code transformation worker:', () => {
let transformCode;
let TransformCache;
let extractDependencies, transform;
beforeEach(() => {
@ -26,13 +28,14 @@ describe('code transformation worker:', () => {
extractDependencies =
require('../extract-dependencies').mockReturnValue({});
transform = jest.fn();
TransformCache = require('../../../lib/TransformCache');
});
it('calls the transform with file name, source code, and transform options', function() {
const filename = 'arbitrary/file.js';
const sourceCode = 'arbitrary(code)';
const transformOptions = {arbitrary: 'options'};
transformCode(transform, filename, sourceCode, {transform: transformOptions});
transformCode(transform, filename, sourceCode, {transform: transformOptions}, '');
expect(transform).toBeCalledWith(
{filename, sourceCode, options: transformOptions}, any(Function));
});
@ -40,12 +43,12 @@ describe('code transformation worker:', () => {
it('prefixes JSON files with an assignment to module.exports to make the code valid', function() {
const filename = 'arbitrary/file.json';
const sourceCode = '{"arbitrary":"property"}';
transformCode(transform, filename, sourceCode, {});
transformCode(transform, filename, sourceCode, {}, '');
expect(transform).toBeCalledWith(
{filename, sourceCode: `module.exports=${sourceCode}`}, any(Function));
});
it('calls back with the result of the transform', done => {
it('calls back with the result of the transform in the cache', done => {
const result = {
code: 'some.other(code)',
map: {}
@ -53,8 +56,10 @@ describe('code transformation worker:', () => {
transform.mockImplementation((_, callback) =>
callback(null, result));
transformCode(transform, 'filename', 'code', {}, (_, data) => {
expect(data).toEqual(objectContaining(result));
transformCode(transform, 'filename', 'code', {}, '', error => {
expect(error).toBeNull();
expect(TransformCache.mock.lastWrite.result)
.toEqual(objectContaining(result));
done();
});
});
@ -69,8 +74,9 @@ describe('code transformation worker:', () => {
transform.mockImplementation((_, callback) =>
callback(null, result));
transformCode(transform, 'aribtrary/file.json', 'b', {}, (_, data) => {
expect(data.code).toBe('{a:1,b:2}');
transformCode(transform, 'arbitrary/file.json', 'b', {}, '', error => {
expect(error).toBeNull();
expect(TransformCache.mock.lastWrite.result.code).toEqual('{a:1,b:2}');
done();
});
}
@ -82,9 +88,11 @@ describe('code transformation worker:', () => {
code: `${shebang} \n arbitrary(code)`,
};
transform.mockImplementation((_, callback) => callback(null, result));
transformCode(transform, 'arbitrary/file.js', 'b', {}, (_, data) => {
expect(data.code).not.toContain(shebang);
expect(data.code.split('\n').length).toEqual(result.code.split('\n').length);
transformCode(transform, 'arbitrary/file.js', 'b', {}, '', error => {
expect(error).toBeNull();
const code = TransformCache.mock.lastWrite.result.code;
expect(code).not.toContain(shebang);
expect(code.split('\n').length).toEqual(result.code.split('\n').length);
done();
});
});
@ -92,7 +100,7 @@ describe('code transformation worker:', () => {
it('calls back with any error yielded by the transform', done => {
const error = Error('arbitrary error');
transform.mockImplementation((_, callback) => callback(error));
transformCode(transform, 'filename', 'code', {}, e => {
transformCode(transform, 'filename', 'code', {}, '', e => {
expect(e).toBe(error);
done();
});
@ -109,7 +117,8 @@ describe('code transformation worker:', () => {
it('passes the transformed code the `extractDependencies`', done => {
code = 'arbitrary(code)';
transformCode(transform, 'filename', 'code', {}, (_, data) => {
transformCode(transform, 'filename', 'code', {}, '', (error) => {
expect(error).toBeNull();
expect(extractDependencies).toBeCalledWith(code);
done();
});
@ -125,7 +134,9 @@ describe('code transformation worker:', () => {
};
extractDependencies.mockReturnValue(dependencyData);
transformCode(transform, 'filename', 'code', {}, (_, data) => {
transformCode(transform, 'filename', 'code', {}, '', error => {
expect(error).toBeNull();
const data = TransformCache.mock.lastWrite.result;
expect(data).toEqual(objectContaining(dependencyData));
done();
});
@ -138,7 +149,11 @@ describe('code transformation worker:', () => {
'filename',
'code',
{extern: true},
(_, {dependencies, dependencyOffsets}) => {
'',
error => {
expect(error).toBeNull();
const {dependencies, dependencyOffsets} =
TransformCache.mock.lastWrite.result;
expect(extractDependencies).not.toBeCalled();
expect(dependencies).toEqual([]);
expect(dependencyOffsets).toEqual([]);
@ -153,7 +168,11 @@ describe('code transformation worker:', () => {
'arbitrary.json',
'{"arbitrary":"json"}',
{},
(_, {dependencies, dependencyOffsets}) => {
'',
error => {
expect(error).toBeNull();
const {dependencies, dependencyOffsets} =
TransformCache.mock.lastWrite.result;
expect(extractDependencies).not.toBeCalled();
expect(dependencies).toEqual([]);
expect(dependencyOffsets).toEqual([]);
@ -191,7 +210,7 @@ describe('code transformation worker:', () => {
it('passes the transform result to `inline` for constant inlining', done => {
transformResult = {map: {version: 3}, code: 'arbitrary(code)'};
transformCode(transform, filename, 'code', options, () => {
transformCode(transform, filename, 'code', options, '', () => {
expect(inline).toBeCalledWith(filename, transformResult, options);
done();
});
@ -200,29 +219,30 @@ describe('code transformation worker:', () => {
it('passes the result obtained from `inline` on to `constant-folding`', done => {
const inlineResult = {map: {version: 3, sources: []}, ast: {}};
inline.mockReturnValue(inlineResult);
transformCode(transform, filename, 'code', options, () => {
transformCode(transform, filename, 'code', options, '', () => {
expect(constantFolding).toBeCalledWith(filename, inlineResult);
done();
});
});
it('Uses the code obtained from `constant-folding` to extract dependencies', done => {
transformCode(transform, filename, 'code', options, () => {
transformCode(transform, filename, 'code', options, '', () => {
expect(extractDependencies).toBeCalledWith(foldedCode);
done();
});
});
it('uses the dependencies obtained from the optimized result', done => {
transformCode(transform, filename, 'code', options, (_, result) => {
transformCode(transform, filename, 'code', options, '', () => {
const result = TransformCache.mock.lastWrite.result;
expect(result.dependencies).toEqual(dependencyData.dependencies);
done();
});
});
it('uses data produced by `constant-folding` for the result', done => {
transformCode(transform, 'filename', 'code', options, (_, result) => {
expect(result)
transformCode(transform, 'filename', 'code', options, '', () => {
expect(TransformCache.mock.lastWrite.result)
.toEqual(objectContaining({code: foldedCode, map: foldedMap}));
done();
});

View File

@ -8,6 +8,12 @@
*/
'use strict';
require('../../../../babelRegisterOnly')([
/packager\/react-packager\/src\/lib\/TransformCache/,
]);
const TransformCache = require('../../lib/TransformCache');
const constantFolding = require('./constant-folding');
const extractDependencies = require('./extract-dependencies');
const inline = require('./inline');
@ -20,7 +26,7 @@ function makeTransformParams(filename, sourceCode, options) {
return {filename, sourceCode, options};
}
function transformCode(transform, filename, sourceCode, options, callback) {
function transformCode(transform, filename, sourceCode, options, transformCacheKey, callback) {
const params = makeTransformParams(filename, sourceCode, options.transform);
const isJson = filename.endsWith('.json');
@ -72,10 +78,18 @@ function transformCode(transform, filename, sourceCode, options, callback) {
result.code = code;
result.map = map;
result.transformFileStartLogEntry = transformFileStartLogEntry;
result.transformFileEndLogEntry = transformFileEndLogEntry;
callback(null, result);
TransformCache.writeSync({
filePath: filename,
sourceCode,
transformCacheKey,
transformOptions: options,
result,
});
return callback(null, {
transformFileStartLogEntry,
transformFileEndLogEntry,
});
});
}
@ -84,9 +98,10 @@ exports.transformAndExtractDependencies = (
filename,
sourceCode,
options,
transformCacheKey,
callback
) => {
transformCode(require(transform), filename, sourceCode, options || {}, callback);
transformCode(require(transform), filename, sourceCode, options || {}, transformCacheKey, callback);
};
exports.minify = (filename, code, sourceMap, callback) => {

View File

@ -50,6 +50,9 @@ const validateOpts = declareOpts({
transformCode: {
type: 'function',
},
transformCacheKey: {
type: 'string',
},
extraNodeModules: {
type: 'object',
required: false,
@ -102,10 +105,15 @@ class Resolver {
cache: opts.cache,
shouldThrowOnUnresolvedErrors: (_, platform) => platform !== 'android',
transformCode: opts.transformCode,
transformCacheKey: opts.transformCacheKey,
extraNodeModules: opts.extraNodeModules,
assetDependencies: ['react-native/Libraries/Image/AssetRegistry'],
// for jest-haste-map
resetCache: options.resetCache,
moduleOptions: {
cacheTransformResults: true,
resetCache: options.resetCache,
},
});
this._minifyCode = opts.minifyCode;

react-packager/src/lib/TransformCache.js (new file, 303 lines)
View File

@ -0,0 +1,303 @@
/**
* Copyright (c) 2016-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @flow
*/
'use strict';
const fs = require('fs');
/**
* We get the package "for free" with "write-file-atomic". MurmurHash3 is a
* faster hash, but non-cryptographic and insecure; that seems reasonable for
* this particular use case.
*/
const imurmurhash = require('imurmurhash');
const jsonStableStringify = require('json-stable-stringify');
const mkdirp = require('mkdirp');
const path = require('path');
const rimraf = require('rimraf');
const writeFileAtomicSync = require('write-file-atomic').sync;
const CACHE_NAME = 'react-native-packager-cache';
const TMP_DIR = path.join(require('os').tmpdir(), CACHE_NAME);
type CacheFilePaths = {transformedCode: string, metadata: string};
function hashSourceCode(props: {
sourceCode: string,
transformCacheKey: string,
}): string {
return imurmurhash(props.transformCacheKey).hash(props.sourceCode).result();
}
/**
* The path, built as a hash, does not take the source code itself into account
* because that would generate lots of files during development. (The source hash
* is stored in the metadata instead).
*/
function getCacheFilePaths(props: {
filePath: string,
transformOptions: mixed,
}): CacheFilePaths {
const hasher = imurmurhash()
.hash(props.filePath)
.hash(jsonStableStringify(props.transformOptions) || '');
let hash = hasher.result().toString(16);
hash = Array(8 - hash.length + 1).join('0') + hash;
const prefix = hash.substr(0, 2);
const fileName = `${hash.substr(2)}${path.basename(props.filePath)}`;
const base = path.join(TMP_DIR, prefix, fileName);
return {transformedCode: base, metadata: base + '.meta'};
}
type CachedResult = {
code: string,
dependencies?: ?Array<string>,
dependencyOffsets?: ?Array<number>,
map?: ?{},
};
/**
* We want to unlink all cache files before writing, so that the operation is
* as atomic as possible.
*/
function unlinkIfExistsSync(filePath: string) {
try {
fs.unlinkSync(filePath);
} catch (error) {
if (error.code === 'ENOENT') {
return;
}
throw error;
}
}
/**
* In the workers we are fine doing sync work because a worker is meant to
* process a single source file at a time.
*
* We store the transformed JS because it is likely to be much bigger than the
* rest of the data JSON. Probably the map should be stored separately as well.
*
* We make the write operation as atomic as possible: indeed, if another
* process is reading the cache at the same time, there would be a risk it
* reads new transformed code, but old metadata. This is avoided by removing
* the files first.
*
* There is still a risk of conflicting writes, which is mitigated by hashing
* the result code and verifying that hash when reading. If two writes happen
* close to each other, one of the workers is going to lose its result no
* matter what.
*/
function writeSync(props: {
filePath: string,
sourceCode: string,
transformCacheKey: string,
transformOptions: mixed,
result: CachedResult,
}): void {
const cacheFilePath = getCacheFilePaths(props);
mkdirp.sync(path.dirname(cacheFilePath.transformedCode));
const {result} = props;
unlinkIfExistsSync(cacheFilePath.transformedCode);
unlinkIfExistsSync(cacheFilePath.metadata);
writeFileAtomicSync(cacheFilePath.transformedCode, result.code);
writeFileAtomicSync(cacheFilePath.metadata, JSON.stringify([
imurmurhash(result.code).result(),
hashSourceCode(props),
result.dependencies,
result.dependencyOffsets,
result.map,
]));
}
type CacheOptions = {resetCache?: boolean};
/* 1 day */
const GARBAGE_COLLECTION_PERIOD = 24 * 60 * 60 * 1000;
/* 4 days */
const CACHE_FILE_MAX_LAST_ACCESS_TIME = GARBAGE_COLLECTION_PERIOD * 4;
/**
* The temporary folder is cleaned up only on boot (on OS X, at least, as far
* as I know). Since people generally reboot only very rarely, we need to
* clean up old stuff from time to time.
*
* This code should be safe even if two different React Native projects are
* running at the same time.
*/
const GARBAGE_COLLECTOR = new (class GarbageCollector {
_lastCollected: ?number;
_cacheWasReset: boolean;
constructor() {
this._cacheWasReset = false;
}
_collectSync() {
mkdirp.sync(TMP_DIR);
const prefixDirs = fs.readdirSync(TMP_DIR);
for (let i = 0; i < prefixDirs.length; ++i) {
const prefixDir = path.join(TMP_DIR, prefixDirs[i]);
const cacheFileNames = fs.readdirSync(prefixDir);
for (let j = 0; j < cacheFileNames.length; ++j) {
const cacheFilePath = path.join(prefixDir, cacheFileNames[j]);
const stats = fs.lstatSync(cacheFilePath);
const timeSinceLastAccess = Date.now() - stats.atime.getTime();
if (
stats.isFile() &&
timeSinceLastAccess > CACHE_FILE_MAX_LAST_ACCESS_TIME
) {
fs.unlinkSync(cacheFilePath);
}
}
}
}
/**
* We do not want a failed cleanup to prevent people from using the tool, but
* we still want to give them a chance to report/fix the problem.
*/
_collectSyncNoThrow() {
try {
this._collectSync();
} catch (error) {
console.error(error.stack);
console.error(
'Error: Cleaning up the cache folder failed. Continuing anyway.',
);
console.error('The cache folder is: %s', TMP_DIR);
}
this._lastCollected = Date.now();
}
_resetCache() {
rimraf.sync(TMP_DIR);
console.log('Warning: The transform cache was reset.');
this._cacheWasReset = true;
this._lastCollected = Date.now();
}
collectIfNecessarySync(options: CacheOptions) {
if (options.resetCache && !this._cacheWasReset) {
this._resetCache();
return;
}
const lastCollected = this._lastCollected;
if (
lastCollected == null ||
Date.now() - lastCollected > GARBAGE_COLLECTION_PERIOD
) {
this._collectSyncNoThrow();
}
}
})();
function readMetadataFileSync(
metadataFilePath: string,
): ?{
cachedResultHash: number,
cachedSourceHash: number,
dependencies: ?Array<string>,
dependencyOffsets: ?Array<number>,
sourceMap: ?{},
} {
const metadataStr = fs.readFileSync(metadataFilePath, 'utf8');
let metadata;
try {
metadata = JSON.parse(metadataStr);
} catch (error) {
if (error instanceof SyntaxError) {
return null;
}
throw error;
}
if (!Array.isArray(metadata)) {
return null;
}
const [
cachedResultHash,
cachedSourceHash,
dependencies,
dependencyOffsets,
sourceMap,
] = metadata;
if (
typeof cachedResultHash !== 'number' ||
typeof cachedSourceHash !== 'number' ||
!(dependencies == null || (Array.isArray(dependencies) && dependencies.every(dep => typeof dep === 'string'))) ||
!(dependencyOffsets == null || (Array.isArray(dependencyOffsets) && dependencyOffsets.every(offset => typeof offset === 'number'))) ||
!(sourceMap == null || typeof sourceMap === 'object')
) {
return null;
}
return {
cachedResultHash,
cachedSourceHash,
dependencies,
dependencyOffsets,
sourceMap,
};
}
/**
* We verify that the source hash matches to ensure we always favor rebuilding
* when the source changes (rather than relying on fs.mtime(), which is a bit
* less robust).
*
* That means when the source changes, we overwrite the old transformed code
* with the new one. This is, I believe, preferable, so as to avoid bloating
* the cache during development cycles, where people change files all the
* time. If we implement a global cache ability at some point, we'll be able
* to store old artifacts as well.
*
* Meanwhile we store transforms with different options in different files, so
* that it is fast to switch between, for example, minified and non-minified
* bundles.
*/
function readSync(props: {
filePath: string,
sourceCode: string,
transformOptions: mixed,
transformCacheKey: string,
cacheOptions: CacheOptions,
}): ?CachedResult {
GARBAGE_COLLECTOR.collectIfNecessarySync(props.cacheOptions);
const cacheFilePaths = getCacheFilePaths(props);
let metadata, transformedCode;
try {
metadata = readMetadataFileSync(cacheFilePaths.metadata);
if (metadata == null) {
return null;
}
const sourceHash = hashSourceCode(props);
if (sourceHash !== metadata.cachedSourceHash) {
return null;
}
transformedCode = fs.readFileSync(cacheFilePaths.transformedCode, 'utf8');
if (metadata.cachedResultHash !== imurmurhash(transformedCode).result()) {
return null;
}
} catch (error) {
if (error.code === 'ENOENT') {
return null;
}
throw error;
}
return {
code: transformedCode,
dependencies: metadata.dependencies,
dependencyOffsets: metadata.dependencyOffsets,
map: metadata.sourceMap,
};
}
module.exports = {
writeSync,
readSync,
};
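
To make the layout above concrete, here is a small sketch (again, editor-added, not part of the commit) that recomputes the cache paths the same way getCacheFilePaths does; the hash in the comment is illustrative. Each entry ends up as a pair of files, <hash-prefix>/<hash-rest><basename> for the transformed code and the same path with a .meta suffix for the metadata JSON array [resultCodeHash, sourceHash, dependencies, dependencyOffsets, map].

// Sketch only: where a given file's cache entries land on disk.
const imurmurhash = require('imurmurhash');
const jsonStableStringify = require('json-stable-stringify');
const os = require('os');
const path = require('path');

const TMP_DIR = path.join(os.tmpdir(), 'react-native-packager-cache');

function cachePathsFor(filePath, transformOptions) {
  let hash = imurmurhash()
    .hash(filePath)
    .hash(jsonStableStringify(transformOptions) || '')
    .result()
    .toString(16);
  hash = Array(8 - hash.length + 1).join('0') + hash; // left-pad to 8 hex chars
  const fileName = hash.substr(2) + path.basename(filePath);
  const base = path.join(TMP_DIR, hash.substr(0, 2), fileName);
  return {transformedCode: base, metadata: base + '.meta'};
}

// e.g. {transformedCode: '<tmpdir>/react-native-packager-cache/ab/cdef01index.js',
//       metadata:        '<tmpdir>/react-native-packager-cache/ab/cdef01index.js.meta'}
console.log(cachePathsFor('/project/index.js', {dev: true, minify: false}));

Because the source code is not part of the path, editing a file during development overwrites its existing cache entry rather than growing the cache; staleness is caught instead by the source and result-code hashes stored in the metadata, which readSync verifies before returning anything.
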

View File

@ -0,0 +1,44 @@
/**
* Copyright (c) 2016-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
'use strict';
const imurmurhash = require('imurmurhash');
const jsonStableStringify = require('json-stable-stringify');
const transformCache = new Map();
const mock = {
lastWrite: null,
reset() {
transformCache.clear();
mock.lastWrite = null;
},
};
const transformCacheKeyOf = (props) =>
props.filePath + '-' + imurmurhash(props.sourceCode)
.hash(props.transformCacheKey)
.hash(jsonStableStringify(props.transformOptions || {}))
.result().toString(16);
function writeSync(props) {
transformCache.set(transformCacheKeyOf(props), props.result);
mock.lastWrite = props;
}
function readSync(props) {
return transformCache.get(transformCacheKeyOf(props));
}
module.exports = {
writeSync,
readSync,
mock,
};

View File

@ -0,0 +1,133 @@
/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
'use strict';
jest
.dontMock('imurmurhash')
.dontMock('json-stable-stringify')
.dontMock('../TransformCache');
const imurmurhash = require('imurmurhash');
const memoryFS = new Map();
jest.mock('fs', () => ({
readFileSync(filePath) {
return memoryFS.get(filePath);
},
unlinkSync(filePath) {
memoryFS.delete(filePath);
},
readdirSync(dirPath) {
// Not required for it to work.
return [];
}
}));
jest.mock('write-file-atomic', () => ({
sync(filePath, data) {
memoryFS.set(filePath, data.toString());
},
}));
jest.mock('rimraf', () => () => {});
function cartesianProductOf(a1, a2) {
const product = [];
a1.forEach(e1 => a2.forEach(e2 => product.push([e1, e2])));
return product;
}
describe('TransformCache', () => {
let TransformCache;
beforeEach(() => {
jest.resetModules();
memoryFS.clear();
TransformCache = require('../TransformCache');
});
it('is caching different files and options separately', () => {
const transformCacheKey = 'abcdef';
const argsFor = ([filePath, transformOptions]) => {
const key = filePath + JSON.stringify(transformOptions);
return {
sourceCode: `/* source for ${key} */`,
transformCacheKey,
filePath,
transformOptions,
result: {
code: `/* result for ${key} */`,
dependencies: ['foo', `dep of ${key}`],
dependencyOffsets: [12, imurmurhash('dep' + key).result()],
map: {desc: `source map for ${key}`},
},
};
};
const allCases = cartesianProductOf(
['/some/project/sub/dir/file.js', '/some/project/other.js'],
[{foo: 1}, {foo: 2}],
);
allCases.forEach(
entry => TransformCache.writeSync(argsFor(entry)),
);
allCases.forEach(entry => {
const args = argsFor(entry);
const {result} = args;
const cachedResult = TransformCache.readSync({
...args,
cacheOptions: {resetCache: false},
});
expect(cachedResult).toEqual(result);
});
});
it('is overriding cache when source code or transform key changes', () => {
const argsFor = ([sourceCode, transformCacheKey]) => {
const key = sourceCode + transformCacheKey;
return {
sourceCode,
transformCacheKey,
filePath: 'test.js',
transformOptions: {foo: 1},
result: {
code: `/* result for ${key} */`,
dependencies: ['foo', `dep of ${key}`],
dependencyOffsets: [12, imurmurhash('dep' + key).result()],
map: {desc: `source map for ${key}`},
},
};
};
const allCases = cartesianProductOf(
['/* foo */', '/* bar */'],
['abcd', 'efgh'],
);
allCases.forEach(entry => {
TransformCache.writeSync(argsFor(entry));
const args = argsFor(entry);
const {result} = args;
const cachedResult = TransformCache.readSync({
...args,
cacheOptions: {resetCache: false},
});
expect(cachedResult).toEqual(result);
});
allCases.pop();
allCases.forEach(entry => {
const cachedResult = TransformCache.readSync({
...argsFor(entry),
cacheOptions: {resetCache: false},
});
expect(cachedResult).toBeNull();
});
});
});

View File

@ -11,9 +11,12 @@
'use strict';
const TransformCache = require('../lib/TransformCache');
const crypto = require('crypto');
const docblock = require('./DependencyGraph/docblock');
const extractRequires = require('./lib/extractRequires');
const invariant = require('invariant');
const isAbsolutePath = require('absolute-path');
const jsonStableStringify = require('json-stable-stringify');
const path = require('path');
@ -23,23 +26,32 @@ import type ModuleCache from './ModuleCache';
import type FastFs from './fastfs';
export type Extractor = (sourceCode: string) => {deps: {sync: Array<string>}};
type TransformedCode = {
code?: string,
dependencies?: Array<string>,
dependencyOffsets?: Array<number>,
map?: string,
code: string,
dependencies?: ?Array<string>,
dependencyOffsets?: ?Array<number>,
map?: ?{},
};
type ReadResult = {
code?: string,
dependencies?: ?Array<string>,
dependencyOffsets?: ?Array<number>,
map?: ?{},
};
export type TransformCode = (
module: Module,
sourceCode: string,
transformOptions: mixed,
) => Promise<{
code: string,
dependencies?: Array<string>,
dependencyOffsets?: Array<number>,
map?: string,
}>;
export type Options = {cacheTransformResults?: boolean};
) => Promise<TransformedCode>;
export type Options = {
resetCache?: boolean,
cacheTransformResults?: boolean,
};
export type DepGraphHelpers = {isNodeModulesDir: (filePath: string) => boolean};
export type ConstructorArgs = {
@ -49,6 +61,7 @@ export type ConstructorArgs = {
cache: Cache,
extractor: Extractor,
transformCode: TransformCode,
transformCacheKey: ?string,
depGraphHelpers: DepGraphHelpers,
options: Options,
};
@ -63,11 +76,13 @@ class Module {
_cache: Cache;
_extractor: Extractor;
_transformCode: TransformCode;
_transformCacheKey: ?string;
_depGraphHelpers: DepGraphHelpers;
_options: Options;
_docBlock: Promise<{id?: string, moduleDocBlock: {[key: string]: mixed}}>;
_readPromise: Promise<string>;
_readSourceCodePromise: Promise<string>;
_readPromises: Map<string, Promise<ReadResult>>;
constructor({
file,
@ -76,6 +91,7 @@ class Module {
cache,
extractor = extractRequires,
transformCode,
transformCacheKey,
depGraphHelpers,
options,
}: ConstructorArgs) {
@ -91,8 +107,15 @@ class Module {
this._cache = cache;
this._extractor = extractor;
this._transformCode = transformCode;
this._transformCacheKey = transformCacheKey;
invariant(
transformCode == null || transformCacheKey != null,
'missing transform cache key',
);
this._depGraphHelpers = depGraphHelpers;
this._options = options;
this._options = options || {};
this._readPromises = new Map();
}
isHaste(): Promise<boolean> {
@ -147,8 +170,14 @@ class Module {
return this.read(transformOptions).then(({dependencies}) => dependencies);
}
/**
* We don't need to invalidate the TransformCache itself because it already
* guarantees that if the source code has changed, we won't return the cached
* transformed code.
*/
invalidate() {
this._cache.invalidate(this.path);
this._readPromises.clear();
}
_parseDocBlock(docBlock) {
@ -167,56 +196,125 @@ class Module {
return {id, moduleDocBlock};
}
_read() {
if (!this._readPromise) {
this._readPromise = this._fastfs.readFile(this.path);
_readSourceCode() {
if (!this._readSourceCodePromise) {
this._readSourceCodePromise = this._fastfs.readFile(this.path);
}
return this._readPromise;
return this._readSourceCodePromise;
}
_readDocBlock() {
if (!this._docBlock) {
this._docBlock = this._read()
this._docBlock = this._readSourceCode()
.then(docBlock => this._parseDocBlock(docBlock));
}
return this._docBlock;
}
read(transformOptions: mixed): Promise<TransformedCode> {
return this._cache.get(
this.path,
cacheKey('moduleData', transformOptions),
() => {
return Promise.all([
this._read(),
this._readDocBlock(),
]).then(([source, {id, moduleDocBlock}]) => {
// Ignore requires in JSON files or generated code. An example of this
// is prebuilt files like the SourceMap library.
const extern = this.isJSON() || 'extern' in moduleDocBlock;
if (extern) {
transformOptions = {...transformOptions, extern};
}
const transformCode = this._transformCode;
const codePromise = transformCode
? transformCode(this, source, transformOptions)
: Promise.resolve({code: source});
return codePromise.then(result => {
const {
code,
/* $FlowFixMe: I don't think it should complain as there's
a default value */
dependencies = extern ? [] : this._extractor(code).deps.sync,
} = result;
if (this._options && this._options.cacheTransformResults === false) {
return {dependencies};
} else {
return {...result, dependencies, id, source};
}
});
});
/**
* We need to add the id and the source to whatever we read from the cache or
* from the worker.
*/
_finalizeReadResult(
source: string,
id?: string,
extern: boolean,
result: TransformedCode,
) {
const {
code,
dependencies = extern ? [] : this._extractor(code).deps.sync,
} = result;
if (this._options.cacheTransformResults === false) {
return {dependencies};
} else {
return {...result, dependencies, id, source};
}
}
_transformAndCache(
transformOptions: mixed,
callback: (error: ?Error, result: ?TransformedCode) => void,
) {
this._readSourceCode().then(sourceCode => {
const transformCode = this._transformCode;
if (!transformCode) {
return callback(null, {code: sourceCode});
}
);
const codePromise = transformCode(this, sourceCode, transformOptions);
return codePromise.then(() => {
const transformCacheKey = this._transformCacheKey;
invariant(transformCacheKey != null, 'missing transform cache key');
const freshResult =
TransformCache.readSync({
filePath: this.path,
sourceCode,
transformCacheKey,
transformOptions,
cacheOptions: this._options,
});
if (freshResult == null) {
callback(new Error(
'Could not read fresh result from transform cache. This ' +
'means there is probably a bug in the worker code ' +
'that prevents it from writing to the cache correctly.',
));
return;
}
callback(undefined, freshResult);
}, callback);
}, callback);
}
/**
* Read everything about a module: source code, transformed code,
* dependencies, etc. The overall process is to read the cache first, and if
* it's a miss, we let the worker write to the cache and read it again.
*/
read(transformOptions: mixed): Promise<ReadResult> {
const key = stableObjectHash(transformOptions || {});
const promise = this._readPromises.get(key);
if (promise != null) {
return promise;
}
const freshPromise = Promise.all([
this._readSourceCode(),
this._readDocBlock(),
]).then(([sourceCode, {id, moduleDocBlock}]) => {
// Ignore requires in JSON files or generated code. An example of this
// is prebuilt files like the SourceMap library.
const extern = this.isJSON() || 'extern' in moduleDocBlock;
if (extern) {
transformOptions = {...transformOptions, extern};
}
const transformCacheKey = this._transformCacheKey;
invariant(transformCacheKey != null, 'missing transform cache key');
const cachedResult =
TransformCache.readSync({
filePath: this.path,
sourceCode,
transformCacheKey,
transformOptions,
cacheOptions: this._options,
});
if (cachedResult) {
return this._finalizeReadResult(sourceCode, id, extern, cachedResult);
}
return new Promise((resolve, reject) => {
this._transformAndCache(
transformOptions,
(transformError, freshResult) => {
if (transformError) {
reject(transformError);
return;
}
invariant(freshResult != null, 'inconsistent state');
resolve(this._finalizeReadResult(sourceCode, id, extern, freshResult));
},
);
});
});
this._readPromises.set(key, freshPromise);
return freshPromise;
}
hash() {
@ -265,10 +363,4 @@ function stableObjectHash(object) {
return digest;
}
function cacheKey(field, transformOptions) {
return transformOptions !== undefined
? stableObjectHash(transformOptions) + '\0' + field
: field;
}
module.exports = Module;
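
One consequence of the new read() flow that is easy to miss: a transformCode implementation no longer hands its result back to Module through the resolved promise. Module.read() first tries TransformCache.readSync(); on a miss it invokes transformCode and then re-reads the cache, so the transformer must have written its result there by the time its promise resolves (the updated worker above and the Module tests below both follow this contract). A minimal editor-added sketch of such an implementation, with an illustrative require path:

// Sketch only: the shape a transformCode function must now have.
const TransformCache = require('./react-packager/src/lib/TransformCache');

function makeTransformCode(transformCacheKey) {
  return (module, sourceCode, transformOptions) => {
    const result = {code: sourceCode, dependencies: [], dependencyOffsets: []};
    TransformCache.writeSync({
      filePath: module.path,
      sourceCode,
      transformCacheKey,
      transformOptions,
      result,
    });
    // The resolved value is ignored; Module.read() reads the cache afterwards.
    return Promise.resolve();
  };
}
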

View File

@ -35,6 +35,7 @@ class ModuleCache {
_cache: Cache;
_extractRequires: Extractor;
_transformCode: TransformCode;
_transformCacheKey: string;
_depGraphHelpers: DepGraphHelpers;
_platforms: mixed;
_assetDependencies: mixed;
@ -46,6 +47,7 @@ class ModuleCache {
cache,
extractRequires,
transformCode,
transformCacheKey,
depGraphHelpers,
assetDependencies,
moduleOptions,
@ -54,6 +56,7 @@ class ModuleCache {
cache: Cache,
extractRequires: Extractor,
transformCode: TransformCode,
transformCacheKey: string,
depGraphHelpers: DepGraphHelpers,
assetDependencies: mixed,
moduleOptions: ModuleOptions,
@ -64,6 +67,7 @@ class ModuleCache {
this._cache = cache;
this._extractRequires = extractRequires;
this._transformCode = transformCode;
this._transformCacheKey = transformCacheKey;
this._depGraphHelpers = depGraphHelpers;
this._platforms = platforms;
this._assetDependencies = assetDependencies;
@ -82,6 +86,7 @@ class ModuleCache {
cache: this._cache,
extractor: this._extractRequires,
transformCode: this._transformCode,
transformCacheKey: this._transformCacheKey,
depGraphHelpers: this._depGraphHelpers,
options: this._moduleOptions,
});
@ -145,6 +150,7 @@ class ModuleCache {
fastfs: this._fastfs,
moduleCache: this,
transformCode: this._transformCode,
transformCacheKey: this._transformCacheKey,
});
}

View File

@ -12,7 +12,8 @@ jest.autoMockOff();
jest.useRealTimers();
jest
.mock('fs')
.mock('../../Logger');
.mock('../../Logger')
.mock('../../lib/TransformCache');
// This is an ugly hack:
// * jest-haste-map uses `find` for fast file system crawling which won't work
@ -206,6 +207,7 @@ describe('DependencyGraph', function() {
useWatchman: false,
maxWorkers: 1,
resetCache: true,
transformCacheKey: 'abcdef',
};
});

View File

@ -11,6 +11,7 @@
jest
.dontMock('absolute-path')
.dontMock('json-stable-stringify')
.dontMock('imurmurhash')
.dontMock('../fastfs')
.dontMock('../lib/extractRequires')
.dontMock('../lib/replacePatterns')
@ -24,6 +25,7 @@ const Fastfs = require('../fastfs');
const Module = require('../Module');
const ModuleCache = require('../ModuleCache');
const DependencyGraphHelpers = require('../DependencyGraph/DependencyGraphHelpers');
const TransformCache = require('../../lib/TransformCache');
const fs = require('graceful-fs');
const packageJson =
@ -62,6 +64,7 @@ describe('Module', () => {
end: jest.genMockFn(),
});
let transformCacheKey;
const createModule = (options) =>
new Module({
options: {
@ -73,21 +76,27 @@ describe('Module', () => {
file: options && options.file || fileName,
depGraphHelpers: new DependencyGraphHelpers(),
moduleCache: new ModuleCache({fastfs, cache}),
transformCacheKey,
});
const createFastFS = () =>
new Fastfs(
'test',
['/root'],
fileWatcher,
['/root/index.js', '/root/package.json'],
{ignore: []},
);
const createJSONModule =
(options) => createModule({...options, file: '/root/package.json'});
beforeEach(function() {
process.platform = 'linux';
cache = createCache();
fastfs = new Fastfs(
'test',
['/root'],
fileWatcher,
['/root/index.js', '/root/package.json'],
{ignore: []},
);
fastfs = createFastFS();
transformCacheKey = 'abcdef';
TransformCache.mock.reset();
});
describe('Module ID', () => {
@ -258,6 +267,7 @@ describe('Module', () => {
describe('Custom Code Transform', () => {
let transformCode;
let transformResult;
const fileContents = 'arbitrary(code);';
const exampleCode = `
${'require'}('a');
@ -265,9 +275,19 @@ describe('Module', () => {
${'require'}('c');`;
beforeEach(function() {
transformCode = jest.genMockFn();
transformResult = {code: ''};
transformCode = jest.genMockFn()
.mockImplementation((module, sourceCode, options) => {
TransformCache.writeSync({
filePath: module.path,
sourceCode,
transformOptions: options,
transformCacheKey,
result: transformResult,
});
return Promise.resolve();
});
mockIndexFile(fileContents);
transformCode.mockReturnValue(Promise.resolve({code: ''}));
});
pit('passes the module and file contents to the transform function when reading', () => {
@ -336,7 +356,7 @@ describe('Module', () => {
});
pit('uses the code that `transformCode` resolves to to extract dependencies', () => {
transformCode.mockReturnValue(Promise.resolve({code: exampleCode}));
transformResult = {code: exampleCode};
const module = createModule({transformCode});
return module.getDependencies().then(dependencies => {
@ -346,10 +366,10 @@ describe('Module', () => {
pit('uses dependencies that `transformCode` resolves to, instead of extracting them', () => {
const mockedDependencies = ['foo', 'bar'];
transformCode.mockReturnValue(Promise.resolve({
transformResult = {
code: exampleCode,
dependencies: mockedDependencies,
}));
};
const module = createModule({transformCode});
return module.getDependencies().then(dependencies => {
@ -358,23 +378,22 @@ describe('Module', () => {
});
pit('forwards all additional properties of the result provided by `transformCode`', () => {
const mockedResult = {
transformResult = {
code: exampleCode,
arbitrary: 'arbitrary',
dependencyOffsets: [12, 764],
map: {version: 3},
subObject: {foo: 'bar'},
};
transformCode.mockReturnValue(Promise.resolve(mockedResult));
const module = createModule({transformCode});
return module.read().then((result) => {
expect(result).toEqual(jasmine.objectContaining(mockedResult));
expect(result).toEqual(jasmine.objectContaining(transformResult));
});
});
pit('does not store anything but dependencies if the `cacheTransformResults` option is disabled', () => {
const mockedResult = {
transformResult = {
code: exampleCode,
arbitrary: 'arbitrary',
dependencies: ['foo', 'bar'],
@ -382,7 +401,6 @@ describe('Module', () => {
map: {version: 3},
subObject: {foo: 'bar'},
};
transformCode.mockReturnValue(Promise.resolve(mockedResult));
const module = createModule({transformCode, options: {
cacheTransformResults: false,
}});
@ -395,7 +413,7 @@ describe('Module', () => {
});
pit('stores all things if options is undefined', () => {
const mockedResult = {
transformResult = {
code: exampleCode,
arbitrary: 'arbitrary',
dependencies: ['foo', 'bar'],
@ -403,16 +421,15 @@ describe('Module', () => {
map: {version: 3},
subObject: {foo: 'bar'},
};
transformCode.mockReturnValue(Promise.resolve(mockedResult));
const module = createModule({transformCode, options: undefined});
return module.read().then((result) => {
expect(result).toEqual({ ...mockedResult, source: 'arbitrary(code);'});
expect(result).toEqual({ ...transformResult, source: 'arbitrary(code);'});
});
});
pit('exposes the transformed code rather than the raw file contents', () => {
transformCode.mockReturnValue(Promise.resolve({code: exampleCode}));
transformResult = {code: exampleCode};
const module = createModule({transformCode});
return Promise.all([module.read(), module.getCode()])
.then(([data, code]) => {
@ -429,7 +446,7 @@ describe('Module', () => {
pit('exposes a source map returned by the transform', () => {
const map = {version: 3};
transformCode.mockReturnValue(Promise.resolve({map, code: exampleCode}));
transformResult = {map, code: exampleCode};
const module = createModule({transformCode});
return Promise.all([module.read(), module.getMap()])
.then(([data, sourceMap]) => {
@ -438,64 +455,62 @@ describe('Module', () => {
});
});
describe('Caching based on options', () => {
let module;
beforeEach(function() {
module = createModule({transformCode});
});
const callsEqual = ([path1, key1], [path2, key2]) => {
expect(path1).toEqual(path2);
expect(key1).toEqual(key2);
};
it('gets dependencies from the cache with the same cache key for the same transform options', () => {
const options = {some: 'options'};
module.getDependencies(options); // first call
module.getDependencies(options); // second call
const {calls} = cache.get.mock;
callsEqual(calls[0], calls[1]);
});
it('gets dependencies from the cache with the same cache key for the equivalent transform options', () => {
module.getDependencies({a: 'b', c: 'd'}); // first call
module.getDependencies({c: 'd', a: 'b'}); // second call
const {calls} = cache.get.mock;
callsEqual(calls[0], calls[1]);
});
it('gets dependencies from the cache with different cache keys for different transform options', () => {
module.getDependencies({some: 'options'});
module.getDependencies({other: 'arbitrary options'});
const {calls} = cache.get.mock;
expect(calls[0][1]).not.toEqual(calls[1][1]);
});
it('gets code from the cache with the same cache key for the same transform options', () => {
const options = {some: 'options'};
module.getCode(options); // first call
module.getCode(options); // second call
const {calls} = cache.get.mock;
callsEqual(calls[0], calls[1]);
});
it('gets code from the cache with the same cache key for the equivalent transform options', () => {
module.getCode({a: 'b', c: 'd'}); // first call
module.getCode({c: 'd', a: 'b'}); // second call
const {calls} = cache.get.mock;
callsEqual(calls[0], calls[1]);
});
it('gets code from the cache with different cache keys for different transform options', () => {
module.getCode({some: 'options'});
module.getCode({other: 'arbitrary options'});
const {calls} = cache.get.mock;
expect(calls[0][1]).not.toEqual(calls[1][1]);
});
pit('caches the transform result for the same transform options', () => {
let module = createModule({transformCode});
return module.read()
.then(() => {
expect(transformCode).toHaveBeenCalledTimes(1);
// We want to check transform caching rather than shallow caching of
// Promises returned by read().
module = createModule({transformCode});
return module.read()
.then(() => {
expect(transformCode).toHaveBeenCalledTimes(1);
});
});
});
pit('triggers a new transform for different transform options', () => {
const module = createModule({transformCode});
return module.read({foo: 1})
.then(() => {
expect(transformCode).toHaveBeenCalledTimes(1);
return module.read({foo: 2})
.then(() => {
expect(transformCode).toHaveBeenCalledTimes(2);
});
});
});
pit('triggers a new transform for different source code', () => {
let module = createModule({transformCode});
return module.read()
.then(() => {
expect(transformCode).toHaveBeenCalledTimes(1);
cache = createCache();
fastfs = createFastFS();
mockIndexFile('test');
module = createModule({transformCode});
return module.read()
.then(() => {
expect(transformCode).toHaveBeenCalledTimes(2);
});
});
});
pit('triggers a new transform for different transform cache key', () => {
let module = createModule({transformCode});
return module.read()
.then(() => {
expect(transformCode).toHaveBeenCalledTimes(1);
transformCacheKey = 'other';
module = createModule({transformCode});
return module.read()
.then(() => {
expect(transformCode).toHaveBeenCalledTimes(2);
});
});
});
});
});

View File

@ -64,6 +64,7 @@ class DependencyGraph {
mocksPattern: mixed,
extractRequires: Extractor,
transformCode: TransformCode,
transformCacheKey: string,
shouldThrowOnUnresolvedErrors: () => boolean,
enableAssetMap: boolean,
moduleOptions: ModuleOptions,
@ -97,6 +98,7 @@ class DependencyGraph {
mocksPattern,
extractRequires,
transformCode,
transformCacheKey,
shouldThrowOnUnresolvedErrors = () => true,
enableAssetMap,
assetDependencies,
@ -120,6 +122,7 @@ class DependencyGraph {
mocksPattern: mixed,
extractRequires: Extractor,
transformCode: TransformCode,
transformCacheKey: string,
shouldThrowOnUnresolvedErrors: () => boolean,
enableAssetMap: boolean,
assetDependencies: mixed,
@ -142,6 +145,7 @@ class DependencyGraph {
mocksPattern,
extractRequires,
transformCode,
transformCacheKey,
shouldThrowOnUnresolvedErrors,
enableAssetMap: enableAssetMap || true,
moduleOptions: moduleOptions || {
@ -202,6 +206,7 @@ class DependencyGraph {
cache: this._cache,
extractRequires: this._opts.extractRequires,
transformCode: this._opts.transformCode,
transformCacheKey: this._opts.transformCacheKey,
depGraphHelpers: this._helpers,
assetDependencies: this._assetDependencies,
moduleOptions: this._opts.moduleOptions,