mirror of https://github.com/status-im/metro.git
metro-bundler: ModuleGraph/output: @format
Reviewed By: cpojer
Differential Revision: D5745469
fbshipit-source-id: 7aa923736d6eb257f547a6d41b4329ce3406c805
This commit is contained in:
parent 9254941133
commit 1c6a96f2b3
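The `@format` docblock pragma opts a file into automated Prettier formatting in Facebook's JS tooling, so the diff below is a mechanical reformat: long call chains and argument lists are rewrapped at the print width with trailing commas, and behavior is unchanged. A minimal, hypothetical sketch of the pattern (illustrative code only, not taken from this diff; `expectedModuleCode` is an invented helper):

// Before @format: manual wrapping of a long assertion.
expect(getModuleCode(modules[index], idForPath).toString())
  .toBe(expectedModuleCode(modules[index], idForPath));

// After @format: Prettier keeps the chain on one line and wraps the
// arguments of the final call once the line exceeds the print width.
expect(getModuleCode(modules[index], idForPath).toString()).toBe(
  expectedModuleCode(modules[index], idForPath),
);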
@ -6,8 +6,11 @@
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @emails oncall+javascript_tools
* @flow
* @format
*/

'use strict';

declare var jest: any;

@ -39,25 +42,28 @@ describe('dependencies-dot', () => {
});

it('produces an ordered file for a standard list of modules', () => {
expect(dependenciesDot({modules: [
createModule('a', ['b']),
createModule('b', ['c']),
createModule('c', []),
]})).toBeAMultilineString(
'digraph {',
'\t"a" -> "b";',
'\t"b" -> "c";',
'}',
);
expect(
dependenciesDot({
modules: [
createModule('a', ['b']),
createModule('b', ['c']),
createModule('c', []),
],
}),
).toBeAMultilineString('digraph {', '\t"a" -> "b";', '\t"b" -> "c";', '}');
});

it('writes one entry per dependency', () => {
expect(dependenciesDot({modules: [
createModule('a', ['b', 'c']),
createModule('b', ['d']),
createModule('c', []),
createModule('d', []),
]})).toBeAMultilineString(
expect(
dependenciesDot({
modules: [
createModule('a', ['b', 'c']),
createModule('b', ['d']),
createModule('c', []),
createModule('d', []),
],
}),
).toBeAMultilineString(
'digraph {',
'\t"a" -> "b";',
'\t"a" -> "c";',

@ -67,26 +73,19 @@ describe('dependencies-dot', () => {
});

it('handles non-printable characters', () => {
expect(dependenciesDot({modules: [
createModule('"\n', ['\r\t']),
createModule('\r\t', []),
]})).toBeAMultilineString(
'digraph {',
'\t"\\"\\n" -> "\\r\\t";',
'}',
);
expect(
dependenciesDot({
modules: [createModule('"\n', ['\r\t']), createModule('\r\t', [])],
}),
).toBeAMultilineString('digraph {', '\t"\\"\\n" -> "\\r\\t";', '}');
});

it('handles circular dependencies', () => {
expect(dependenciesDot({modules: [
createModule('a', ['b']),
createModule('b', ['a']),
]})).toBeAMultilineString(
'digraph {',
'\t"a" -> "b";',
'\t"b" -> "a";',
'}',
);
expect(
dependenciesDot({
modules: [createModule('a', ['b']), createModule('b', ['a'])],
}),
).toBeAMultilineString('digraph {', '\t"a" -> "b";', '\t"b" -> "a";', '}');
});
});

@ -6,8 +6,11 @@
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @emails oncall+javascript_tools
* @flow
* @format
*/

'use strict';

declare var jest: any;

@ -41,7 +44,7 @@ beforeAll(() => {
});

it('starts the bundle file with the magic number', () => {
expect(code.readUInt32LE(0)).toBe(0xFB0BD1E5);
expect(code.readUInt32LE(0)).toBe(0xfb0bd1e5);
});

it('contains the number of modules in the module table', () => {

@ -49,34 +52,41 @@ it('contains the number of modules in the module table', () => {
});

it('has the length correct of the startup section', () => {
expect(code.readUInt32LE(SIZEOF_INT32 * 2))
.toBe(requireCall.file.code.length + 1);
expect(code.readUInt32LE(SIZEOF_INT32 * 2)).toBe(
requireCall.file.code.length + 1,
);
});

it('contains the code after the offset table', () => {
const {codeOffset, startupSectionLength, table} = parseOffsetTable(code);

const startupSection =
code.slice(codeOffset, codeOffset + startupSectionLength - 1);
const startupSection = code.slice(
codeOffset,
codeOffset + startupSectionLength - 1,
);
expect(startupSection.toString()).toBe(requireCall.file.code);

table.forEach(([offset, length], i) => {
const moduleCode =
code.slice(codeOffset + offset, codeOffset + offset + length - 1);
const moduleCode = code.slice(
codeOffset + offset,
codeOffset + offset + length - 1,
);
expect(moduleCode.toString()).toBe(expectedCode(modules[i]));
});
});

it('creates a source map', () => {
let line = countLines(requireCall);
expect(map.sections.slice(1)).toEqual(modules.map(m => {
const section = {
map: m.file.map || lineByLineMap(m.file.path),
offset: {column: 0, line},
};
line += countLines(m);
return section;
}));
expect(map.sections.slice(1)).toEqual(
modules.map(m => {
const section = {
map: m.file.map || lineByLineMap(m.file.path),
offset: {column: 0, line},
};
line += countLines(m);
return section;
}),
);
expect(map.x_facebook_offsets).toEqual([1, 2, 3, 4, 5, 6]);
});

@ -91,11 +101,13 @@ describe('Startup section optimization', () => {
it('supports additional modules in the startup section', () => {
const {codeOffset, startupSectionLength, table} = parseOffsetTable(code);

const startupSection =
code.slice(codeOffset, codeOffset + startupSectionLength - 1);
expect(startupSection.toString())
.toBe(preloaded.concat([requireCall]).map(expectedCode).join('\n'));

const startupSection = code.slice(
codeOffset,
codeOffset + startupSectionLength - 1,
);
expect(startupSection.toString()).toBe(
preloaded.concat([requireCall]).map(expectedCode).join('\n'),
);

preloaded.forEach(m => {
const idx = idForPath(m.file);

@ -104,8 +116,10 @@ describe('Startup section optimization', () => {

table.forEach(([offset, length], i) => {
if (offset !== 0 && length !== 0) {
const moduleCode =
code.slice(codeOffset + offset, codeOffset + offset + length - 1);
const moduleCode = code.slice(
codeOffset + offset,
codeOffset + offset + length - 1,
);
expect(moduleCode.toString()).toBe(expectedCode(modules[i]));
}
});

@ -117,30 +131,25 @@ describe('Startup section optimization', () => {
countLines(requireCall),
);

expect(map.x_facebook_offsets).toEqual([4, 5,,, 6]); // eslint-disable-line no-sparse-arrays
expect(map.x_facebook_offsets).toEqual([4, 5, , , 6]); // eslint-disable-line no-sparse-arrays

expect(map.sections.slice(1)).toEqual(
modules
.filter(not(Set.prototype.has), new Set(preloaded))
.map(m => {
const section = {
map: m.file.map || lineByLineMap(m.file.path),
offset: {column: 0, line},
};
line += countLines(m);
return section;
}
));
modules.filter(not(Set.prototype.has), new Set(preloaded)).map(m => {
const section = {
map: m.file.map || lineByLineMap(m.file.path),
offset: {column: 0, line},
};
line += countLines(m);
return section;
}),
);
});
});

describe('RAM groups / common sections', () => {
let groups, groupHeads;
beforeAll(() => {
groups = [
[modules[1], modules[2], modules[5]],
[modules[3], modules[4]],
];
groups = [[modules[1], modules[2], modules[5]], [modules[3], modules[4]]];
groupHeads = groups.map(g => g[0]);
({code, map} = createRamBundle(undefined, groupHeads.map(getPath)));
});

@ -154,9 +163,11 @@ describe('RAM groups / common sections', () => {
deps.forEach(id => expect(table[id]).toEqual(groupEntry));

const [offset, length] = groupEntry;
const groupCode = code.slice(codeOffset + offset, codeOffset + offset + length - 1);
expect(groupCode.toString())
.toEqual(group.map(expectedCode).join('\n'));
const groupCode = code.slice(
codeOffset + offset,
codeOffset + offset + length - 1,
);
expect(groupCode.toString()).toEqual(group.map(expectedCode).join('\n'));
});
});

@ -165,19 +176,21 @@ describe('RAM groups / common sections', () => {
const maps = map.sections.slice(-2);
const toplevelOffsets = [2, 5];

maps.map((groupMap, i) => [groups[i], groupMap]).forEach(([group, groupMap], i) => {
const offsets = group.reduce(moduleLineOffsets, [])[0];
expect(groupMap).toEqual({
map: {
version: 3,
sections: group.map((module, j) => ({
map: module.file.map,
offset: {line: offsets[j], column: 0},
})),
},
offset: {line: toplevelOffsets[i], column: 0},
maps
.map((groupMap, i) => [groups[i], groupMap])
.forEach(([group, groupMap], i) => {
const offsets = group.reduce(moduleLineOffsets, [])[0];
expect(groupMap).toEqual({
map: {
version: 3,
sections: group.map((module, j) => ({
map: module.file.map,
offset: {line: offsets[j], column: 0},
})),
},
offset: {line: toplevelOffsets[i], column: 0},
});
});
});
});

function moduleLineOffsets([offsets = [], line = 0], module) {

@ -200,15 +213,18 @@ function createRamBundle(preloadedModules = new Set(), ramGroups) {
return {code: result.code, map: result.map};
}

function makeModule(name, deps = [], type = 'module', moduleCode = `var ${name};`) {
function makeModule(
name,
deps = [],
type = 'module',
moduleCode = `var ${name};`,
) {
const path = makeModulePath(name);
return {
dependencies: deps.map(makeDependency),
file: {
code: type === 'module' ? makeModuleCode(moduleCode) : moduleCode,
map: type !== 'module'
? null
: makeModuleMap(name, path),
map: type !== 'module' ? null : makeModuleMap(name, path),
path,
type,
},

@ -268,7 +284,10 @@ function parseOffsetTable(buffer) {
const table = Array(n);
for (let i = 0; i < n; ++i) {
const offset = baseOffset + i * 2 * SIZEOF_INT32;
table[i] = [buffer.readUInt32LE(offset), buffer.readUInt32LE(offset + SIZEOF_INT32)];
table[i] = [
buffer.readUInt32LE(offset),
buffer.readUInt32LE(offset + SIZEOF_INT32),
];
}
return {
codeOffset: baseOffset + n * 2 * SIZEOF_INT32,

@ -291,4 +310,7 @@ function lineByLineMap(file) {
};
}

const not = fn => function() { return !fn.apply(this, arguments); };
const not = fn =>
function() {
return !fn.apply(this, arguments);
};

@ -6,8 +6,11 @@
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @emails oncall+javascript_tools
* @flow
* @format
*/

'use strict';

declare var jest: any;

@ -42,7 +45,7 @@ beforeAll(() => {
});

it('does not start the bundle file with the magic number (not a binary one)', () => {
expect(new Buffer(code).readUInt32LE(0)).not.toBe(0xFB0BD1E5);
expect(new Buffer(code).readUInt32LE(0)).not.toBe(0xfb0bd1e5);
});

it('contains the startup code on the main file', () => {

@ -51,14 +54,16 @@ it('contains the startup code on the main file', () => {

it('creates a source map', () => {
let line = countLines(requireCall);
expect(map.sections.slice(1)).toEqual(modules.map(m => {
const section = {
map: m.file.map || lineByLineMap(m.file.path),
offset: {column: 0, line},
};
line += countLines(m);
return section;
}));
expect(map.sections.slice(1)).toEqual(
modules.map(m => {
const section = {
map: m.file.map || lineByLineMap(m.file.path),
offset: {column: 0, line},
};
line += countLines(m);
return section;
}),
);
expect(map.x_facebook_offsets).toEqual([1, 2, 3, 4, 5, 6]);
});

@ -67,7 +72,7 @@ it('creates a magic file with the number', () => {
// $FlowFixMe "extraFiles" is always defined at this point.
expect(extraFiles.get('UNBUNDLE')).toBeDefined();
// $FlowFixMe "extraFiles" is always defined at this point.
expect(extraFiles.get('UNBUNDLE').readUInt32LE(0)).toBe(0xFB0BD1E5);
expect(extraFiles.get('UNBUNDLE').readUInt32LE(0)).toBe(0xfb0bd1e5);
});

it('bundles each file separately', () => {

@ -75,13 +80,17 @@ it('bundles each file separately', () => {

modules.forEach((module, i) => {
// $FlowFixMe "extraFiles" is always defined at this point.
expect(extraFiles.get(`js-modules/${i}.js`).toString())
.toBe(getModuleCode(modules[i], idForPath));
expect(extraFiles.get(`js-modules/${i}.js`).toString()).toBe(
getModuleCode(modules[i], idForPath),
);
});
});

function createRamBundle(preloadedModules = new Set(), ramGroups) {
const build = multipleFilesRamBundle.createBuilder(preloadedModules, ramGroups);
const build = multipleFilesRamBundle.createBuilder(
preloadedModules,
ramGroups,
);
const result = build({
filename: 'arbitrary/filename.js',
idForPath,

@ -92,15 +101,18 @@ function createRamBundle(preloadedModules = new Set(), ramGroups) {
return {code: result.code, map: result.map, extraFiles: result.extraFiles};
}

function makeModule(name, deps = [], type = 'module', moduleCode = `var ${name};`) {
function makeModule(
name,
deps = [],
type = 'module',
moduleCode = `var ${name};`,
) {
const path = makeModulePath(name);
return {
dependencies: deps.map(makeDependency),
file: {
code: type === 'module' ? makeModuleCode(moduleCode) : moduleCode,
map: type !== 'module'
? null
: makeModuleMap(name, path),
map: type !== 'module' ? null : makeModuleMap(name, path),
path,
type,
},

@ -5,15 +5,16 @@
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @emails oncall+javascript_tools
* @format
*/

'use strict';

const {match} = require('sinon');
const {fn} = require('../../test-helpers');
const {
addModuleIdsToModuleWrapper,
createIdForPathFn,
} = require('../util');
const {addModuleIdsToModuleWrapper, createIdForPathFn} = require('../util');
const {match} = require('sinon');

const {any} = jasmine;

@ -33,18 +34,23 @@ describe('`addModuleIdsToModuleWrapper`:', () => {

const idForPath = fn();
idForPath.stub
.withArgs(match({path})).returns(12)
.withArgs(match({path: dependencies[0].path})).returns(345)
.withArgs(match({path: dependencies[1].path})).returns(6);
.withArgs(match({path}))
.returns(12)
.withArgs(match({path: dependencies[0].path}))
.returns(345)
.withArgs(match({path: dependencies[1].path}))
.returns(6);

expect(addModuleIdsToModuleWrapper(module, idForPath))
.toEqual('__d(function(){},12,[345,6]);');
expect(addModuleIdsToModuleWrapper(module, idForPath)).toEqual(
'__d(function(){},12,[345,6]);',
);
});

it('omits the array of dependency IDs if it is empty', () => {
const module = createModule();
expect(addModuleIdsToModuleWrapper(module, () => 98))
.toEqual(`__d(function(){},${98});`);
expect(addModuleIdsToModuleWrapper(module, () => 98)).toEqual(
`__d(function(){},${98});`,
);
});
});

@ -7,7 +7,9 @@
* of patent rights can be found in the PATENTS file in the same directory.
*
* @flow
* @format
*/

'use strict';

import type {Module} from '../types.flow';

@ -7,14 +7,19 @@
* of patent rights can be found in the PATENTS file in the same directory.
*
* @flow
* @format
*/

'use strict';

const buildSourceMapWithMetaData = require('../../shared/output/unbundle/build-unbundle-sourcemap-with-metadata.js');
const nullthrows = require('fbjs/lib/nullthrows');

const {createRamBundleGroups} = require('../../Bundler/util');
const {buildTableAndContents, createModuleGroups} = require('../../shared/output/unbundle/as-indexed-file');
const {
buildTableAndContents,
createModuleGroups,
} = require('../../shared/output/unbundle/as-indexed-file');
const {concat, getModuleCode, partition, toModuleTransport} = require('./util');

import type {FBIndexMap} from '../../lib/SourceMap.js';

@ -31,7 +36,11 @@ function asIndexedRamBundle({
const [startup, deferred] = partition(modules, preloadedModules);
const startupModules = Array.from(concat(startup, requireCalls));
const deferredModules = deferred.map(m => toModuleTransport(m, idForPath));
const ramGroups = createRamBundleGroups(ramGroupHeads || [], deferredModules, subtree);
const ramGroups = createRamBundleGroups(
ramGroupHeads || [],
deferredModules,
subtree,
);
const moduleGroups = createModuleGroups(ramGroups, deferredModules);

const tableAndContents = buildTableAndContents(

@ -52,17 +61,13 @@ function asIndexedRamBundle({
};
}

function *subtree(
moduleTransport,
moduleTransportsByPath,
seen = new Set(),
) {
function* subtree(moduleTransport, moduleTransportsByPath, seen = new Set()) {
seen.add(moduleTransport.id);
for (const {path} of moduleTransport.dependencies) {
const dependency = nullthrows(moduleTransportsByPath.get(path));
if (!seen.has(dependency.id)) {
yield dependency.id;
yield *subtree(dependency, moduleTransportsByPath, seen);
yield* subtree(dependency, moduleTransportsByPath, seen);
}
}
}

@ -7,7 +7,9 @@
* of patent rights can be found in the PATENTS file in the same directory.
*
* @flow
* @format
*/

'use strict';

const MAGIC_UNBUNDLE_NUMBER = require('../../shared/output/unbundle/magic-number');

@ -7,7 +7,9 @@
* of patent rights can be found in the PATENTS file in the same directory.
*
* @flow
* @format
*/

'use strict';

const meta = require('../../shared/output/meta');

@ -7,7 +7,9 @@
* of patent rights can be found in the PATENTS file in the same directory.
*
* @flow
* @format
*/

'use strict';

import type {FBSourceMap, IndexMapSection, IndexMap} from '../../lib/SourceMap';

@ -16,11 +18,11 @@ export type {FBSourceMap};

type CreateIndexMapOptions = {|
file?: string,
sections?: Array<IndexMapSection>
sections?: Array<IndexMapSection>,
|};

exports.createIndexMap = (opts?: CreateIndexMapOptions): IndexMap => ({
version: 3,
file: opts && opts.file,
sections: opts && opts.sections || [],
sections: (opts && opts.sections) || [],
});

@ -7,7 +7,9 @@
* of patent rights can be found in the PATENTS file in the same directory.
*
* @flow
* @format
*/

'use strict';

const virtualModule = require('../module').virtual;

@ -23,7 +25,7 @@ import type {IdForPathFn, Module} from '../types.flow';
// the dependencies of the module before the closing parenthesis.
function addModuleIdsToModuleWrapper(
module: Module,
idForPath: {path: string} => number,
idForPath: ({path: string}) => number,
): string {
const {dependencies, file} = module;
const {code} = file;

@ -36,24 +38,17 @@ function addModuleIdsToModuleWrapper(

// This code runs for both development and production builds, after
// minification. That's why we leave out all spaces.
const depencyIds =
dependencies.length ? `,[${dependencies.map(idForPath).join(',')}]` : '';
return (
code.slice(0, index) +
`,${fileId}` +
depencyIds +
code.slice(index)
);
const depencyIds = dependencies.length
? `,[${dependencies.map(idForPath).join(',')}]`
: '';
return code.slice(0, index) + `,${fileId}` + depencyIds + code.slice(index);
}

exports.addModuleIdsToModuleWrapper = addModuleIdsToModuleWrapper;

// Adds the module ids to a file if the file is a module. If it's not (e.g. a
// script) it just keeps it as-is.
function getModuleCode(
module: Module,
idForPath: IdForPathFn,
) {
function getModuleCode(module: Module, idForPath: IdForPathFn) {
const {file} = module;
return file.type === 'module'
? addModuleIdsToModuleWrapper(module, idForPath)

@ -73,7 +68,7 @@ exports.concat = function* concat<T>(

// Creates an idempotent function that returns numeric IDs for objects based
// on their `path` property.
exports.createIdForPathFn = (): ({path: string} => number) => {
exports.createIdForPathFn = (): (({path: string}) => number) => {
const seen = new Map();
let next = 0;
return ({path}) => {

@ -88,7 +83,7 @@ exports.createIdForPathFn = (): ({path: string} => number) => {

// creates a series of virtual modules with require calls to the passed-in
// modules.
exports.requireCallsTo = function* (
exports.requireCallsTo = function*(
modules: Iterable<Module>,
idForPath: IdForPathFn,
): Iterable<Module> {

@ -114,10 +109,7 @@ exports.partition = (

// Transforms a new Module object into an old one, so that it can be passed
// around code.
exports.toModuleTransport = (
module: Module,
idForPath: IdForPathFn,
) => {
exports.toModuleTransport = (module: Module, idForPath: IdForPathFn) => {
const {dependencies, file} = module;
return {
code: getModuleCode(module, idForPath),