mirror of https://github.com/status-im/metro.git
add flow
Summary: This adds flow types to new code written for the Buck/Packager integration.

Reviewed By: cpojer

Differential Revision: D4175156

fbshipit-source-id: 38c3d2c9176c7b3cf22b8baed7d445a75d033d04

This commit is contained in:
parent 907f08a794
commit 7181a2d436
@@ -10,10 +10,10 @@
 */
'use strict';

const invariant = require('fbjs/lib/invariant');
const memoize = require('async/memoize');
const queue = require('async/queue');
const seq = require('async/seq');
const invariant = require('fbjs/lib/invariant');

import type {GraphFn, LoadFn, ResolveFn, File, Module} from './types.flow';

@@ -28,7 +28,8 @@ exports.create = function create(resolve: ResolveFn, load: LoadFn): GraphFn {

  if (typeof platform !== 'string') {
    log.error('`Graph`, called without a platform');
    return callback(Error('The target platform has to be passed'));
    callback(Error('The target platform has to be passed'));
    return;
  }

  const modules: Map<string | null, Module> = new Map();
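For illustration only (not part of the diff): a minimal sketch of how the `GraphFn` returned by `exports.create` is invoked, following the signatures above. The module path and the resolve/load stubs are placeholders.

// Placeholder resolve/load implementations; the real ones come from the packager.
// (Signatures simplified here; see ResolveFn/LoadFn in types.flow.)
const resolveFn = (...args) => args[args.length - 1](null, '/resolved/module.js');
const loadFn = (...args) => args[args.length - 1](null, {ast: {}, path: '/resolved/module.js'}, []);

const graph = require('./Graph').create(resolveFn, loadFn); // path to this module is assumed

// Entry points, a platform string, optional GraphOptions, optional callback.
graph(['/app/index.js'], 'ios', {optimize: false}, (error, modules) => {
  if (error) {
    console.error(error); // e.g. the "target platform" error when platform is not a string
    return;
  }
  console.log(modules.length); // modules: Array<Module>
});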
@@ -12,47 +12,48 @@
import type {Console} from 'console';

type callback<T> = (error: ?Error, result?: T) => any;
type callback2<T, T1> = (error: ?Error, a?: T, b?: T1) => any;
export type Callback<A = void, B = void>
  = ((error: Error) => mixed)
  & ((error: null | void, a: A, b: B) => mixed);

type ResolveOptions = {
  log?: Console,
};

type LoadOptions = {
type LoadOptions = {|
  log?: Console,
  optimize?: boolean,
  platform?: string,
};
|};

type GraphOptions = {
type GraphOptions = {|
  cwd?: string,
  log?: Console,
  optimize?: boolean,
  skip?: Set<string>,
};
|};

type Dependency = {
type Dependency = {|
  id: string,
  path: string,
};
|};

export type File = {
  path: string,
  code?: string,
export type File = {|
  ast: Object,
};
  code?: string,
  path: string,
|};

export type Module = {
  file: File,
export type Module = {|
  dependencies: Array<Dependency>,
};
  file: File,
|};

export type GraphFn = (
  entryPoints: Iterable<string>,
  platform: string,
  options?: GraphOptions,
  callback?: callback<Array<Module>>,
  callback?: Callback<Array<Module>>,
) => void;

export type ResolveFn = (

@@ -60,32 +61,49 @@ export type ResolveFn = (
  source: string,
  platform: string,
  options?: ResolveOptions,
  callback: callback<string>,
  callback: Callback<string>,
) => void;

export type LoadFn = (
  file: string,
  options: LoadOptions,
  callback: callback2<File, Array<string>>,
  callback: Callback<File, Array<string>>,
) => void;

type TransformResult = {
export type TransformResult = {|
  code: string,
  dependencies: Array<string>,
  dependencyMapName?: string,
  map: ?Object,
  dependencies: Array<String>,
};
|};

export type TransformedFile = {
  file: string,
  code: string,
  transformed: {[variant: string]: TransformResult},
  file: string,
  hasteID: ?string,
  isPolyfill: boolean,
  package?: PackageData,
  transformed: {[variant: string]: TransformResult},
};

export type PackageData = {
  name?: string,
  main?: string,
export type PackageData = {|
  browser?: Object | string,
  main?: string,
  name?: string,
  'react-native'?: Object | string,
};
|};

export type TransformFnResult = {|
  ast: Object,
  map?: Object,
|};

export type TransformFn = (
  data: {|
    filename: string,
    options?: Object,
    plugins?: Array<string | Object | [string | Object, any]>,
    sourceCode: string,
  |},
  callback: Callback<TransformFnResult>
) => void;
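As an aside (not part of the commit): a small sketch of how the overloaded `Callback` type reads in practice. `loadFile` is a hypothetical function using the `Callback<File, Array<string>>` shape that `LoadFn` expects.

import type {Callback, File} from './types.flow';

// Hypothetical consumer of Callback<File, Array<string>>.
function loadFile(path: string, callback: Callback<File, Array<string>>): void {
  if (path === '') {
    // First overload: called with an Error only.
    callback(new Error('empty path'));
    return;
  }
  // Second overload: null/undefined error plus both result values.
  callback(null, {ast: {}, code: 'code', path}, ['./dependency']);
}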
@@ -5,257 +5,21 @@
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @flow
 */
'use strict';

// RUNS UNTRANSFORMED IN A WORKER PROCESS. ONLY USE NODE 4 COMPATIBLE FEATURES!
const optimizeModule = require('./worker/optimize-module');
const transformModule = require('./worker/transform-module');
const wrapWorkerFn = require('./worker/wrap-worker-fn');

const babel = require('babel-core');
const babelGenerate = require('babel-generator').default;
const collectDependencies = require('../JSTransformer/worker/collect-dependencies');
const constantFolding = require('../JSTransformer/worker/constant-folding').plugin;
const docblock = require('../node-haste/DependencyGraph/docblock');
const fs = require('fs');
const inline = require('../JSTransformer/worker/inline').plugin;
const minify = require('../JSTransformer/worker/minify');
const mkdirp = require('mkdirp');
const path = require('path');
const series = require('async/series');
const sourceMap = require('source-map');

import type {OptimizationOptions} from './worker/optimize-module';
import type {TransformOptions} from './worker/transform-module';
import type {WorkerFnWithIO} from './worker/wrap-worker-fn';

const basename = path.basename;
const dirname = path.dirname;
const defaultVariants = {default: {}};
const moduleFactoryParameters = ['require', 'module', 'global', 'exports'];
const polyfillFactoryParameters = ['global'];

function transformJSON(infile, options, outfile, callback) {
  const json = fs.readFileSync(infile, 'utf8');
  const value = JSON.parse(json);

  const filename = options.filename || infile;
  const code =
    `__d(function(${moduleFactoryParameters.join(', ')}) { module.exports = \n${
      json
    }\n})`;

  const moduleData = {
    code,
    map: null, // no source map for JSON files!
    dependencies: [],
  };
  const transformed = {};

  Object
    .keys(options.variants || defaultVariants)
    .forEach(key => (transformed[key] = moduleData));

  const result = {
    file: filename,
    code: json,
    transformed,
    hasteID: value.name,
  };

  if (basename(filename) === 'package.json') {
    result.package = {
      name: value.name,
      main: value.main,
      browser: value.browser,
      'react-native': value['react-native'],
    };
  }

  writeResult(outfile, result);
  callback(null);
}

function transformModule(infile, options, outfile, callback) {
  const filename = infile;
  if (filename.endsWith('.json')) {
    return transformJSON(infile, options, outfile, callback);
  }

  const transform = require(options.transform);
  const code = fs.readFileSync(infile, 'utf8');

  const variants = options.variants || defaultVariants;
  const tasks = {};
  Object.keys(variants).forEach(name => {
    tasks[name] = cb => transform({
      filename,
      sourceCode: code,
      options: variants[name],
    }, cb);
  });

  series(tasks, (error, transformed) => {
    if (error) {
      callback(error);
      return;
    }

    Object.keys(transformed).forEach(key => {
      transformed[key] =
        makeResult(transformed[key].ast, filename, code, options.polyfill);
    });

    const annotations = docblock.parseAsObject(docblock.extract(code));

    const result = {
      code,
      file: filename,
      isPolyfill: !!options.polyfill,
      hasteID: annotations.providesModule || annotations.provide || null,
      transformed,
    };

    try {
      writeResult(outfile, result);
    } catch (writeError) {
      callback(writeError);
      return;
    }
    callback(null);
  });
}

function optimizeModule(
  infile,
  outfile,
  optimizationOptions,
  callback,
) {
  const data = JSON.parse(fs.readFileSync(infile, 'utf8'));
  const transformed = data.transformed;
  const result = Object.assign({}, data);
  result.transformed = {};

  const file = data.file;
  const code = data.code;

  Object.keys(transformed).forEach(key => {
    result.transformed[key] =
      optimize(transformed[key], file, code, optimizationOptions);
  });
  writeResult(outfile, result);

  callback(null);
}

function makeResult(ast, filename, sourceCode, isPolyfill = false) {
  const {dependencies, dependencyMapName} = isPolyfill
    ? {dependencies: []}
    : collectDependencies(ast);
  const file = isPolyfill
    ? wrapPolyfill(ast)
    : wrapModule(ast, dependencyMapName);

  const gen = generate(file, filename, sourceCode);
  return {code: gen.code, map: gen.map, dependencies, dependencyMapName};
}

function wrapModule(file, dependencyMapName) {
  const t = babel.types;
  const params = moduleFactoryParameters.concat(dependencyMapName);
  const factory = functionFromProgram(file.program, params);
  const def = t.callExpression(t.identifier('__d'), [factory]);
  return t.file(t.program([t.expressionStatement(def)]));
}

function wrapPolyfill(file) {
  const t = babel.types;
  const factory = functionFromProgram(file.program, polyfillFactoryParameters);
  const iife = t.callExpression(factory, [t.identifier('this')]);
  return t.file(t.program([t.expressionStatement(iife)]));
}

function functionFromProgram(program, parameters) {
  const t = babel.types;
  return t.functionExpression(
    t.identifier(''),
    parameters.map(makeIdentifier),
    t.blockStatement(program.body, program.directives),
  );
}

function optimize(transformed, file, originalCode, options) {
  const optimized =
    optimizeCode(transformed.code, transformed.map, file, options);

  const dependencies = options.isPolyfill
    ? []
    : collectDependencies.forOptimization(
        optimized.ast,
        transformed.dependencies,
        transformed.dependencyMapName,
      );

  const inputMap = transformed.map;
  const gen = generate(optimized.ast, file, originalCode);

  const min = minify(
    file,
    gen.code,
    inputMap && mergeSourceMaps(file, inputMap, gen.map),
  );
  return {code: min.code, map: inputMap && min.map, dependencies};
}

function optimizeCode(code, map, filename, inliningOptions) {
  return babel.transform(code, {
    plugins: [
      [constantFolding],
      [inline, Object.assign({isWrapped: true}, inliningOptions)],
    ],
    babelrc: false,
    code: false,
    filename,
  });
}

function generate(ast, filename, sourceCode) {
  return babelGenerate(ast, {
    comments: false,
    compact: true,
    filename,
    sourceFileName: filename,
    sourceMaps: true,
    sourceMapTarget: filename,
  }, sourceCode);
}

function mergeSourceMaps(file, originalMap, secondMap) {
  const merged = new sourceMap.SourceMapGenerator();
  const inputMap = new sourceMap.SourceMapConsumer(originalMap);
  new sourceMap.SourceMapConsumer(secondMap)
    .eachMapping(mapping => {
      const original = inputMap.originalPositionFor({
        line: mapping.originalLine,
        column: mapping.originalColumn,
      });
      if (original.line == null) {
        return;
      }

      merged.addMapping({
        generated: {line: mapping.generatedLine, column: mapping.generatedColumn},
        original: {line: original.line, column: original.column || 0},
        source: file,
        name: original.name || mapping.name,
      });
    });
  return merged.toJSON();
}

function writeResult(outfile, result) {
  mkdirp.sync(dirname(outfile));
  fs.writeFileSync(outfile, JSON.stringify(result), 'utf8');
}

function makeIdentifier(name) {
  return babel.types.identifier(name);
}

exports.transformModule = transformModule;
exports.optimizeModule = optimizeModule;
exports.optimizeModule =
  (wrapWorkerFn(optimizeModule): WorkerFnWithIO<OptimizationOptions>);
exports.transformModule =
  (wrapWorkerFn(transformModule): WorkerFnWithIO<TransformOptions>);
@@ -1,43 +0,0 @@
/**
 * Copyright (c) 2016-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @flow
 */
'use strict';

declare export function transformModule(
  infile: string,
  options: {
    filename?: string,
    transform: string,
    variants?: {[key: string]: Object},
  },
  outfile: string,
  callback: (e?: Error) => void,
): void

declare export function optimizeModule(
  infile: string,
  outfile: string,
  optimizationOptions: {
    dev?: boolean,
    isPolyfill?: boolean,
    platform?: string,
  },
  callback: (e?: Error) => void,
): void

declare function optimizeCode(
  code: string,
  map: Object,
  filename: string,
  inliningOptions: {|
    dev?: boolean,
    platform?: string,
  |},
): {code: string, map: Object};
@@ -0,0 +1,89 @@
/**
 * Copyright (c) 2016-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */
'use strict';

jest
  .disableAutomock()
  .setMock('fs', jest.genMockFromModule('fs'))
  .mock('mkdirp');

const wrapWorkerFn = require('../wrap-worker-fn');
const {dirname} = require('path');
const {fn} = require('../../test-helpers');

const {any} = jasmine;

describe('wrapWorkerFn:', () => {
  const infile = '/arbitrary/in/file';
  const outfile = '/arbitrary/in/file';

  let workerFn, wrapped;
  beforeEach(() => {
    workerFn = fn();
    workerFn.stub.yields();
    wrapped = wrapWorkerFn(workerFn);
  });

  const fs = require('fs');
  const mkdirp = require('mkdirp');

  it('reads the passed-in file synchronously as UTF-8', done => {
    wrapped(infile, outfile, {}, () => {
      expect(fs.readFileSync).toBeCalledWith(infile, 'utf8');
      done();
    });
  });

  it('calls the worker function with file contents and options', done => {
    const contents = 'arbitrary(contents);';
    const options = {arbitrary: 'options'};
    fs.readFileSync.mockReturnValue(contents);
    wrapped(infile, outfile, options, () => {
      expect(workerFn).toBeCalledWith(contents, options, any(Function));
      done();
    });
  });

  it('passes through any error that the worker function calls back with', done => {
    const error = new Error();
    workerFn.stub.yields(error);
    wrapped(infile, outfile, {}, e => {
      expect(e).toBe(error);
      done();
    });
  });

  it('writes the result to disk', done => {
    const result = {arbitrary: 'result'};
    workerFn.stub.yields(null, result);
    wrapped(infile, outfile, {}, () => {
      expect(mkdirp.sync).toBeCalledWith(dirname(outfile));
      expect(fs.writeFileSync).toBeCalledWith(outfile, JSON.stringify(result), 'utf8');
      done();
    });
  });

  it('calls back with any error thrown by `mkdirp.sync`', done => {
    const error = new Error();
    mkdirp.sync.mockImplementationOnce(() => { throw error; });
    wrapped(infile, outfile, {}, e => {
      expect(e).toBe(error);
      done();
    });
  });

  it('calls back with any error thrown by `fs.writeFileSync`', done => {
    const error = new Error();
    fs.writeFileSync.mockImplementationOnce(() => { throw error; });
    wrapped(infile, outfile, {}, e => {
      expect(e).toBe(error);
      done();
    });
  });
});
@@ -5,14 +5,21 @@
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @flow
 */
'use strict';

// RUNS UNTRANSFORMED IN A WORKER PROCESS. ONLY USE NODE 4 COMPATIBLE FEATURES!
const nullthrows = require('fbjs/lib/nullthrows');

const {traverse, types} = require('babel-core');

type AST = Object;

class Replacement {
  nameToIndex: Map<string, number>;
  nextIndex: number;

  constructor() {
    this.nameToIndex = new Map();
    this.nextIndex = 0;

@@ -47,6 +54,9 @@ class Replacement {
}

class ProdReplacement {
  replacement: Replacement;
  names: Array<string>;

  constructor(names) {
    this.replacement = new Replacement();
    this.names = names;

@@ -118,15 +128,16 @@ function collectDependencies(ast, replacement, dependencyMapIdentifier) {

  return {
    dependencies: replacement.getNames(),
    dependencyMapName: traversalState.dependencyMapIdentifier.name,
    dependencyMapName: nullthrows(traversalState.dependencyMapIdentifier).name,
  };
}

exports = module.exports =
  ast => collectDependencies(ast, new Replacement());
  (ast: AST) => collectDependencies(ast, new Replacement());
exports.forOptimization =
  (ast, names, dependencyMapName) => collectDependencies(
    ast,
    new ProdReplacement(names),
    dependencyMapName && types.identifier(dependencyMapName),
  );
  (ast: AST, names: Array<string>, dependencyMapName?: string) =>
    collectDependencies(
      ast,
      new ProdReplacement(names),
      dependencyMapName && types.identifier(dependencyMapName),
    );
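For illustration only: a rough sketch of invoking the default export on a parsed AST. Obtaining the AST via `babel.transform` mirrors how other code in this commit produces ASTs; the source string and the expected output noted in comments are made up.

const babel = require('babel-core');
const collectDependencies = require('./collect-dependencies');

// Parse some module code to an AST first (babelrc/code flags as used elsewhere in this commit).
const {ast} = babel.transform("require('./a'); require('./b');", {babelrc: false, code: false});

// Development pass: rewrites the require targets and reports what it found.
const {dependencies, dependencyMapName} = collectDependencies(ast);
console.log(dependencies);      // expected to be roughly ['./a', './b']
console.log(dependencyMapName); // name of the injected dependency-map identifier

`forOptimization(ast, names, dependencyMapName)` is the variant that `optimize-module.js` below uses on already-transformed output, reusing the previously collected names.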
@@ -0,0 +1,26 @@
/**
 * Copyright (c) 2016-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @flow
 */
'use strict';

const babelGenerate = require('babel-generator').default;

function generate(ast: Object, filename: string, sourceCode: string) {
  return babelGenerate(ast, {
    comments: false,
    compact: true,
    filename,
    sourceFileName: filename,
    sourceMaps: true,
    sourceMapTarget: filename,
  }, sourceCode);
}

module.exports = generate;
@@ -0,0 +1,106 @@
/**
 * Copyright (c) 2016-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @flow
 */
'use strict';

const babel = require('babel-core');
const collectDependencies = require('./collect-dependencies');
const constantFolding = require('../../JSTransformer/worker/constant-folding').plugin;
const generate = require('./generate');
const inline = require('../../JSTransformer/worker/inline').plugin;
const minify = require('../../JSTransformer/worker/minify');
const sourceMap = require('source-map');

import type {Callback, TransformedFile, TransformResult} from '../types.flow';

export type OptimizationOptions = {|
  dev?: boolean,
  isPolyfill?: boolean,
  platform?: string,
|};

function optimizeModule(
  json: string,
  optimizationOptions: OptimizationOptions,
  callback: Callback<TransformedFile>,
): void {
  const data: TransformedFile = JSON.parse(json);
  const {code, file, transformed} = data;
  const result = {...data, transformed: {}};

  //$FlowIssue #14545724
  Object.entries(transformed).forEach(([k, t: TransformResult]: [*, TransformResult]) => {
    result.transformed[k] = optimize(t, file, code, optimizationOptions);
  });
  callback(null, result);
}

function optimize(transformed, file, originalCode, options): TransformResult {
  const {code, dependencyMapName, map} = transformed;
  const optimized = optimizeCode(code, map, file, options);

  let dependencies;
  if (options.isPolyfill) {
    dependencies = [];
  } else {
    ({dependencies} = collectDependencies.forOptimization(
      optimized.ast,
      transformed.dependencies,
      dependencyMapName,
    ));
  }

  const inputMap = transformed.map;
  const gen = generate(optimized.ast, file, originalCode);

  const min = minify(
    file,
    gen.code,
    inputMap && mergeSourceMaps(file, inputMap, gen.map),
  );
  return {code: min.code, map: inputMap && min.map, dependencies};
}

function optimizeCode(code, map, filename, inliningOptions) {
  return babel.transform(code, {
    plugins: [
      [constantFolding],
      [inline, {...inliningOptions, isWrapped: true}],
    ],
    babelrc: false,
    code: false,
    filename,
  });
}

function mergeSourceMaps(file, originalMap, secondMap) {
  const merged = new sourceMap.SourceMapGenerator();
  const inputMap = new sourceMap.SourceMapConsumer(originalMap);
  new sourceMap.SourceMapConsumer(secondMap)
    .eachMapping(mapping => {
      const original = inputMap.originalPositionFor({
        line: mapping.originalLine,
        column: mapping.originalColumn,
      });
      if (original.line == null) {
        return;
      }

      merged.addMapping({
        generated: {line: mapping.generatedLine, column: mapping.generatedColumn},
        original: {line: original.line, column: original.column || 0},
        source: file,
        name: original.name || mapping.name,
      });
    });
  return merged.toJSON();
}

module.exports = optimizeModule;
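A hedged usage sketch (not in the commit): `optimizeModule` consumes the JSON-serialized `TransformedFile` that the transform step writes to disk. The file path and option values below are just one possible combination.

const fs = require('fs');
const optimizeModule = require('./optimize-module');

// `json` would normally come from the outfile written by the transform step; path is hypothetical.
const json = fs.readFileSync('/path/to/transformed.json', 'utf8');
optimizeModule(json, {dev: false, platform: 'android'}, (error, optimized) => {
  if (error) {
    console.error(error);
    return;
  }
  // `optimized.transformed` now holds minified code per variant.
  console.log(Object.keys(optimized.transformed));
});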
@@ -0,0 +1,165 @@
/**
 * Copyright (c) 2016-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @flow
 */
'use strict';

const babel = require('babel-core');
const collectDependencies = require('./collect-dependencies');
const docblock = require('../../node-haste/DependencyGraph/docblock');
const generate = require('./generate');
const series = require('async/series');

const {basename} = require('path');

import type {
  Callback,
  TransformedFile,
  TransformFn,
  TransformFnResult,
  TransformResult,
} from '../types.flow';

type TransformVariants = {[key: string]: Object};
export type TransformOptions = {|
  filename: string,
  polyfill?: boolean,
  transform: TransformFn,
  variants?: TransformVariants,
|};

const defaultVariants = {default: {}};
const moduleFactoryParameters = ['require', 'module', 'global', 'exports'];
const polyfillFactoryParameters = ['global'];

function transformModule(
  code: string,
  options: TransformOptions,
  callback: Callback<TransformedFile>,
): void {
  if (options.filename.endsWith('.json')) {
    return transformJSON(code, options, callback);
  }

  const {filename, transform, variants = defaultVariants} = options;
  const tasks = {};
  Object.keys(variants).forEach(name => {
    tasks[name] = cb => transform({
      filename,
      sourceCode: code,
      options: variants[name],
    }, cb);
  });

  series(tasks, (error, results: {[key: string]: TransformFnResult}) => {
    if (error) {
      callback(error);
      return;
    }

    const transformed: {[key: string]: TransformResult} = {};

    //$FlowIssue #14545724
    Object.entries(results).forEach(([key, value]: [*, TransformFnResult]) => {
      transformed[key] = makeResult(value.ast, filename, code, options.polyfill);
    });

    const annotations = docblock.parseAsObject(docblock.extract(code));

    callback(null, {
      code,
      file: filename,
      isPolyfill: !!options.polyfill,
      hasteID: annotations.providesModule || annotations.provide || null,
      transformed,
    });
  });
}

function transformJSON(json, options, callback) {
  const value = JSON.parse(json);
  const {filename} = options;
  const code =
    `__d(function(${moduleFactoryParameters.join(', ')}) { module.exports = \n${
      json
    }\n})`;

  const moduleData = {
    code,
    map: null, // no source map for JSON files!
    dependencies: [],
  };
  const transformed = {};

  Object
    .keys(options.variants || defaultVariants)
    .forEach(key => (transformed[key] = moduleData));

  const result: TransformedFile = {
    code: json,
    file: filename,
    hasteID: value.name,
    isPolyfill: false,
    transformed,
  };

  if (basename(filename) === 'package.json') {
    result.package = {
      name: value.name,
      main: value.main,
      browser: value.browser,
      'react-native': value['react-native'],
    };
  }
  callback(null, result);
}

function makeResult(ast, filename, sourceCode, isPolyfill = false) {
  let dependencies, dependencyMapName, file;
  if (isPolyfill) {
    dependencies = [];
    file = wrapPolyfill(ast);
  } else {
    ({dependencies, dependencyMapName} = collectDependencies(ast));
    file = wrapModule(ast, dependencyMapName);
  }

  const gen = generate(file, filename, sourceCode);
  return {code: gen.code, map: gen.map, dependencies, dependencyMapName};
}

function wrapModule(file, dependencyMapName) {
  const t = babel.types;
  const params = moduleFactoryParameters.concat(dependencyMapName);
  const factory = functionFromProgram(file.program, params);
  const def = t.callExpression(t.identifier('__d'), [factory]);
  return t.file(t.program([t.expressionStatement(def)]));
}

function wrapPolyfill(file) {
  const t = babel.types;
  const factory = functionFromProgram(file.program, polyfillFactoryParameters);
  const iife = t.callExpression(factory, [t.identifier('this')]);
  return t.file(t.program([t.expressionStatement(iife)]));
}

function functionFromProgram(program, parameters) {
  const t = babel.types;
  return t.functionExpression(
    t.identifier(''),
    parameters.map(makeIdentifier),
    t.blockStatement(program.body, program.directives),
  );
}

function makeIdentifier(name) {
  return babel.types.identifier(name);
}

module.exports = transformModule;
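For illustration, a minimal `TransformFn` that satisfies the shape of the `transform` option above, assuming babel-core is acceptable as the underlying transformer. This is a sketch, not the transformer the packager actually ships.

const babel = require('babel-core');

// A hypothetical TransformFn: parses the source and hands the AST back, error-first.
function transform({filename, options, sourceCode}, callback) {
  try {
    const {ast} = babel.transform(sourceCode, {
      filename,
      babelrc: false,
      code: false,
    });
    callback(null, {ast});
  } catch (error) {
    callback(error);
  }
}

module.exports = transform;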
@@ -0,0 +1,62 @@
/**
 * Copyright (c) 2016-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @flow
 */
'use strict';

const fs = require('fs');
const mkdirp = require('mkdirp');

const {dirname} = require('path');

import type {Callback} from '../types.flow';

type Path = string;
type WorkerFn<Options> = (
  fileContents: string,
  options: Options,
  callback: Callback<Object>,
) => void;
export type WorkerFnWithIO<Options> = (
  infile: Path,
  outfile: Path,
  options: Options,
  callback: Callback<>,
) => void;

function wrapWorkerFn<Options>(
  workerFunction: WorkerFn<Options>,
): WorkerFnWithIO<Options> {
  return (
    infile: Path,
    outfile: Path,
    options: Options,
    callback: Callback<>,
  ) => {
    const contents = fs.readFileSync(infile, 'utf8');
    workerFunction(contents, options, (error, result) => {
      if (error) {
        callback(error);
        return;
      }

      try {
        mkdirp.sync(dirname(outfile));
        fs.writeFileSync(outfile, JSON.stringify(result), 'utf8');
      } catch (writeError) {
        callback(writeError);
        return;
      }

      callback(null);
    });
  };
}

module.exports = wrapWorkerFn;
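Finally, a small sketch (not part of the commit) of how a worker function gets the file I/O wrapper applied. The `parseJson` worker and the /tmp paths are made up.

const wrapWorkerFn = require('./wrap-worker-fn');

// Hypothetical worker: receives file *contents*, not a path, and calls back error-first.
function parseJson(contents, options, callback) {
  try {
    callback(null, {parsed: JSON.parse(contents), options});
  } catch (error) {
    callback(error);
  }
}

const parseJsonWithIO = wrapWorkerFn(parseJson);

// The wrapped version reads the infile, runs the worker, and writes the JSON-serialized result to the outfile.
parseJsonWithIO('/tmp/in.json', '/tmp/out.json', {}, error => {
  if (error) {
    console.error(error);
  }
});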