Remove react-native/packager folder.

Summary: This folder is no longer necessary. All the code now lives in https://github.com/facebook/metro-bundler

Reviewed By: davidaurelio, jeanlauliac

Differential Revision: D5199196

fbshipit-source-id: 35bf0f10a9163f53426db9a76f8f853dceb69167

parent 0bc3bb06ed
commit 236e9e4d01
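The hunks below apply the move mechanically: requires that previously pointed into the in-repo `react-native/packager` folder now point into the published `metro-bundler` build output. For example, taken from the jest setup hunk further down:

```js
// Before: resolved from the in-repo packager folder
require('../packager/src/Resolver/polyfills/babelHelpers.js');

// After: resolved from the metro-bundler package
require('metro-bundler/build/Resolver/polyfills/babelHelpers.js');
```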
@@ -13,17 +13,14 @@
const babel = require('babel-core');
const babelRegisterOnly = require('metro-bundler/build/babelRegisterOnly');
const createCacheKeyFunction = require('fbjs-scripts/jest/createCacheKeyFunction');
const path = require('path');
const transformer = require('metro-bundler/build/transformer.js');

const nodeFiles = RegExp([
  '/local-cli/',
  '/packager/(?!src/Resolver/polyfills/)',
].join('|'));
const nodeOptions = babelRegisterOnly.config([nodeFiles]);

babelRegisterOnly([]);
// has to be required after setting up babelRegisterOnly
const transformer = require('metro-bundler/build/transformer.js');

module.exports = {
  process(src/*: string*/, file/*: string*/) {
@@ -49,7 +46,7 @@ module.exports = {

  getCacheKey: createCacheKeyFunction([
    __filename,
    path.join(__dirname, '../packager/src/transformer.js'),
    require.resolve('metro-bundler/build/transformer.js'),
    require.resolve('babel-core/package.json'),
  ]),
};
@@ -10,9 +10,9 @@

const mockComponent = require.requireActual('./mockComponent');

require.requireActual('../packager/src/Resolver/polyfills/babelHelpers.js');
require.requireActual('../packager/src/Resolver/polyfills/Object.es7.js');
require.requireActual('../packager/src/Resolver/polyfills/error-guard');
require.requireActual('metro-bundler/build/Resolver/polyfills/babelHelpers.js');
require.requireActual('metro-bundler/build/Resolver/polyfills/Object.es7.js');
require.requireActual('metro-bundler/build/Resolver/polyfills/error-guard');

global.__DEV__ = true;
@@ -1,3 +1,12 @@
/**
 * Copyright (c) 2013-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */

'use strict';

jest.autoMockOff();

@@ -16,11 +25,7 @@ const writePlist = require('../../ios/writePlist');
const projectPath = path.join(__dirname, '../../__fixtures__/project.pbxproj');
const infoPlistPath = path.join(__dirname, '../../__fixtures__/Info.plist');

fs.__setMockFilesystem({
  'Basic': {
    'project.pbxproj': readFileSync(projectPath).toString(),
  }
});
fs.readFileSync = jest.fn(() => readFileSync(projectPath).toString());

const project = xcode.project('/Basic/project.pbxproj');
@@ -1,10 +0,0 @@
{
  "rules": {
    "extra-arrow-initializer": 0,
    "no-alert": 0,
    "no-console-disallow": 0
  },
  "env": {
    "node": true
  }
}
@@ -1,22 +0,0 @@
Glossary
===

Terminology commonly used in React Native Packager / Metro Bundler is explained
here. This document is work in progress, please help completing it.

## Build Root

Configuration files (`rn-cli.config.js`) support configuring one or more roots
that are watched for file changes during development. In the context of the
integration with the `js_*` rule family in [Buck][], there is only a single root,
the build root used by Buck.


## Local Path

A *local path* / `localPath` is the path to a file relative to a
[*build root*](#build-root).



[Buck]: http://buckbuild.com/
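The "Build Root" entry above refers to declaring watched roots through `rn-cli.config.js`. As an editorial aid, not part of this commit, here is a minimal sketch of such a config; the `getProjectRoots` hook name and the extra root path are assumptions used for illustration.

```js
// Hypothetical rn-cli.config.js sketch (not from this commit).
// It illustrates declaring one or more build roots that the packager
// watches for file changes during development.
const path = require('path');

module.exports = {
  getProjectRoots() {
    // The app itself plus a shared library checkout (example paths).
    return [__dirname, path.resolve(__dirname, '..', 'shared-components')];
  },
};
```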
@@ -1,9 +0,0 @@
# metro-bundler

🚇 The JavaScript bundler for React Native.

- **🚅 Fast**: We aim for sub-second reload cycles, fast startup and quick bundling speeds.
- **⚖️ Scalable**: Works with thousands of modules in a single application.
- **⚛️ Integrated**: Supports every React Native project out of the box.

This project was previously part of the [react-native](https://github.com/facebook/react-native) repository. In this smaller repository it is easier for the team working on Metro Bundler to respond to both issues and pull requests. See [react-native#13976](https://github.com/facebook/react-native/issues/13976) for the initial announcement.
@@ -1,46 +0,0 @@
{
  "version": "0.7.0",
  "name": "metro-bundler",
  "description": "🚇 The JavaScript bundler for React Native.",
  "main": "src/index.js",
  "repository": {
    "type": "git",
    "url": "git@github.com:facebook/metro-bundler.git"
  },
  "dependencies": {
    "absolute-path": "^0.0.0",
    "async": "^2.4.0",
    "babel-core": "^6.24.1",
    "babel-generator": "^6.24.1",
    "babel-plugin-external-helpers": "^6.18.0",
    "babel-preset-es2015-node": "^6.1.1",
    "babel-preset-fbjs": "^2.1.0",
    "babel-preset-react-native": "^1.9.1",
    "babel-register": "^6.24.1",
    "babylon": "^6.17.0",
    "chalk": "^1.1.1",
    "concat-stream": "^1.6.0",
    "core-js": "^2.2.2",
    "debug": "^2.2.0",
    "denodeify": "^1.2.1",
    "fbjs": "0.8.12",
    "graceful-fs": "^4.1.3",
    "image-size": "^0.3.5",
    "jest-haste-map": "^20.0.4",
    "json-stable-stringify": "^1.0.1",
    "json5": "^0.4.0",
    "left-pad": "^1.1.3",
    "lodash": "^4.16.6",
    "merge-stream": "^1.0.1",
    "mime-types": "2.1.11",
    "mkdirp": "^0.5.1",
    "request": "^2.79.0",
    "rimraf": "^2.5.4",
    "source-map": "^0.5.6",
    "temp": "0.8.3",
    "throat": "^3.0.0",
    "uglify-js": "2.7.5",
    "write-file-atomic": "^1.2.0",
    "xpipe": "^1.0.5"
  }
}
@@ -1,4 +0,0 @@
{
  "presets": [ "react-native" ],
  "plugins": []
}
@@ -1,301 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2013-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
jest.disableAutomock();
|
||||
|
||||
jest.mock('fs');
|
||||
|
||||
const AssetServer = require('../');
|
||||
const crypto = require('crypto');
|
||||
const fs = require('fs');
|
||||
|
||||
const {objectContaining} = jasmine;
|
||||
|
||||
describe('AssetServer', () => {
|
||||
describe('assetServer.get', () => {
|
||||
it('should work for the simple case', () => {
|
||||
const server = new AssetServer({
|
||||
projectRoots: ['/root'],
|
||||
assetExts: ['png'],
|
||||
});
|
||||
|
||||
fs.__setMockFilesystem({
|
||||
'root': {
|
||||
imgs: {
|
||||
'b.png': 'b image',
|
||||
'b@2x.png': 'b2 image',
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return Promise.all([
|
||||
server.get('imgs/b.png'),
|
||||
server.get('imgs/b@1x.png'),
|
||||
]).then(resp =>
|
||||
resp.forEach(data =>
|
||||
expect(data).toBe('b image')
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
it('should work for the simple case with platform ext', () => {
|
||||
const server = new AssetServer({
|
||||
projectRoots: ['/root'],
|
||||
assetExts: ['png'],
|
||||
});
|
||||
|
||||
fs.__setMockFilesystem({
|
||||
'root': {
|
||||
imgs: {
|
||||
'b.ios.png': 'b ios image',
|
||||
'b.android.png': 'b android image',
|
||||
'c.png': 'c general image',
|
||||
'c.android.png': 'c android image',
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return Promise.all([
|
||||
server.get('imgs/b.png', 'ios').then(
|
||||
data => expect(data).toBe('b ios image')
|
||||
),
|
||||
server.get('imgs/b.png', 'android').then(
|
||||
data => expect(data).toBe('b android image')
|
||||
),
|
||||
server.get('imgs/c.png', 'android').then(
|
||||
data => expect(data).toBe('c android image')
|
||||
),
|
||||
server.get('imgs/c.png', 'ios').then(
|
||||
data => expect(data).toBe('c general image')
|
||||
),
|
||||
server.get('imgs/c.png').then(
|
||||
data => expect(data).toBe('c general image')
|
||||
),
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
it('should work for the simple case with jpg', () => {
|
||||
const server = new AssetServer({
|
||||
projectRoots: ['/root'],
|
||||
assetExts: ['png', 'jpg'],
|
||||
});
|
||||
|
||||
fs.__setMockFilesystem({
|
||||
'root': {
|
||||
imgs: {
|
||||
'b.png': 'png image',
|
||||
'b.jpg': 'jpeg image',
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return Promise.all([
|
||||
server.get('imgs/b.jpg'),
|
||||
server.get('imgs/b.png'),
|
||||
]).then(data =>
|
||||
expect(data).toEqual([
|
||||
'jpeg image',
|
||||
'png image',
|
||||
])
|
||||
);
|
||||
});
|
||||
|
||||
it('should pick the bigger one', () => {
|
||||
const server = new AssetServer({
|
||||
projectRoots: ['/root'],
|
||||
assetExts: ['png'],
|
||||
});
|
||||
|
||||
fs.__setMockFilesystem({
|
||||
'root': {
|
||||
imgs: {
|
||||
'b@1x.png': 'b1 image',
|
||||
'b@2x.png': 'b2 image',
|
||||
'b@4x.png': 'b4 image',
|
||||
'b@4.5x.png': 'b4.5 image',
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return server.get('imgs/b@3x.png').then(data =>
|
||||
expect(data).toBe('b4 image')
|
||||
);
|
||||
});
|
||||
|
||||
it('should pick the bigger one with platform ext', () => {
|
||||
const server = new AssetServer({
|
||||
projectRoots: ['/root'],
|
||||
assetExts: ['png'],
|
||||
});
|
||||
|
||||
fs.__setMockFilesystem({
|
||||
'root': {
|
||||
imgs: {
|
||||
'b@1x.png': 'b1 image',
|
||||
'b@2x.png': 'b2 image',
|
||||
'b@4x.png': 'b4 image',
|
||||
'b@4.5x.png': 'b4.5 image',
|
||||
'b@1x.ios.png': 'b1 ios image',
|
||||
'b@2x.ios.png': 'b2 ios image',
|
||||
'b@4x.ios.png': 'b4 ios image',
|
||||
'b@4.5x.ios.png': 'b4.5 ios image',
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return Promise.all([
|
||||
server.get('imgs/b@3x.png').then(data =>
|
||||
expect(data).toBe('b4 image')
|
||||
),
|
||||
server.get('imgs/b@3x.png', 'ios').then(data =>
|
||||
expect(data).toBe('b4 ios image')
|
||||
),
|
||||
]);
|
||||
});
|
||||
|
||||
it('should support multiple project roots', () => {
|
||||
const server = new AssetServer({
|
||||
projectRoots: ['/root', '/root2'],
|
||||
assetExts: ['png'],
|
||||
});
|
||||
|
||||
fs.__setMockFilesystem({
|
||||
'root': {
|
||||
imgs: {
|
||||
'b.png': 'b image',
|
||||
},
|
||||
},
|
||||
'root2': {
|
||||
'newImages': {
|
||||
'imgs': {
|
||||
'b@1x.png': 'b1 image',
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return server.get('newImages/imgs/b.png').then(data =>
|
||||
expect(data).toBe('b1 image')
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('assetServer.getAssetData', () => {
|
||||
it('should get assetData', () => {
|
||||
const server = new AssetServer({
|
||||
projectRoots: ['/root'],
|
||||
assetExts: ['png'],
|
||||
});
|
||||
|
||||
fs.__setMockFilesystem({
|
||||
'root': {
|
||||
imgs: {
|
||||
'b@1x.png': 'b1 image',
|
||||
'b@2x.png': 'b2 image',
|
||||
'b@4x.png': 'b4 image',
|
||||
'b@4.5x.png': 'b4.5 image',
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return server.getAssetData('imgs/b.png').then(data => {
|
||||
expect(data).toEqual(objectContaining({
|
||||
type: 'png',
|
||||
name: 'b',
|
||||
scales: [1, 2, 4, 4.5],
|
||||
files: [
|
||||
'/root/imgs/b@1x.png',
|
||||
'/root/imgs/b@2x.png',
|
||||
'/root/imgs/b@4x.png',
|
||||
'/root/imgs/b@4.5x.png',
|
||||
],
|
||||
}));
|
||||
});
|
||||
});
|
||||
|
||||
it('should get assetData for non-png images', () => {
|
||||
const server = new AssetServer({
|
||||
projectRoots: ['/root'],
|
||||
assetExts: ['png', 'jpeg'],
|
||||
});
|
||||
|
||||
fs.__setMockFilesystem({
|
||||
'root': {
|
||||
imgs: {
|
||||
'b@1x.jpg': 'b1 image',
|
||||
'b@2x.jpg': 'b2 image',
|
||||
'b@4x.jpg': 'b4 image',
|
||||
'b@4.5x.jpg': 'b4.5 image',
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return server.getAssetData('imgs/b.jpg').then(data => {
|
||||
expect(data).toEqual(objectContaining({
|
||||
type: 'jpg',
|
||||
name: 'b',
|
||||
scales: [1, 2, 4, 4.5],
|
||||
files: [
|
||||
'/root/imgs/b@1x.jpg',
|
||||
'/root/imgs/b@2x.jpg',
|
||||
'/root/imgs/b@4x.jpg',
|
||||
'/root/imgs/b@4.5x.jpg',
|
||||
],
|
||||
}));
|
||||
});
|
||||
});
|
||||
|
||||
describe('hash:', () => {
|
||||
let server, mockFS;
|
||||
beforeEach(() => {
|
||||
server = new AssetServer({
|
||||
projectRoots: ['/root'],
|
||||
assetExts: ['jpg'],
|
||||
});
|
||||
|
||||
mockFS = {
|
||||
'root': {
|
||||
imgs: {
|
||||
'b@1x.jpg': 'b1 image',
|
||||
'b@2x.jpg': 'b2 image',
|
||||
'b@4x.jpg': 'b4 image',
|
||||
'b@4.5x.jpg': 'b4.5 image',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
fs.__setMockFilesystem(mockFS);
|
||||
});
|
||||
|
||||
it('uses the file contents to build the hash', () => {
|
||||
const hash = crypto.createHash('md5');
|
||||
for (const name in mockFS.root.imgs) {
|
||||
hash.update(mockFS.root.imgs[name]);
|
||||
}
|
||||
|
||||
return server.getAssetData('imgs/b.jpg').then(data =>
|
||||
expect(data).toEqual(objectContaining({hash: hash.digest('hex')}))
|
||||
);
|
||||
});
|
||||
|
||||
it('changes the hash when the passed-in file watcher emits an `all` event', () => {
|
||||
return server.getAssetData('imgs/b.jpg').then(initialData => {
|
||||
mockFS.root.imgs['b@4x.jpg'] = 'updated data';
|
||||
server.onFileChange('all', '/root/imgs/b@4x.jpg');
|
||||
return server.getAssetData('imgs/b.jpg').then(data =>
|
||||
expect(data.hash).not.toEqual(initialData.hash)
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -1,253 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const AssetPaths = require('../node-haste/lib/AssetPaths');
|
||||
|
||||
const crypto = require('crypto');
|
||||
const denodeify = require('denodeify');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
import type {AssetData} from '../node-haste/lib/AssetPaths';
|
||||
|
||||
const stat = denodeify(fs.stat);
|
||||
const readDir = denodeify(fs.readdir);
|
||||
const readFile = denodeify(fs.readFile);
|
||||
|
||||
class AssetServer {
|
||||
|
||||
_roots: $ReadOnlyArray<string>;
|
||||
_assetExts: $ReadOnlyArray<string>;
|
||||
_hashes: Map<?string, string>;
|
||||
_files: Map<string, string>;
|
||||
|
||||
constructor(options: {|
|
||||
+assetExts: $ReadOnlyArray<string>,
|
||||
+projectRoots: $ReadOnlyArray<string>,
|
||||
|}) {
|
||||
this._roots = options.projectRoots;
|
||||
this._assetExts = options.assetExts;
|
||||
this._hashes = new Map();
|
||||
this._files = new Map();
|
||||
}
|
||||
|
||||
get(assetPath: string, platform: ?string = null): Promise<Buffer> {
|
||||
const assetData = AssetPaths.parse(
|
||||
assetPath,
|
||||
new Set(platform != null ? [platform] : []),
|
||||
);
|
||||
return this._getAssetRecord(assetPath, platform).then(record => {
|
||||
for (let i = 0; i < record.scales.length; i++) {
|
||||
if (record.scales[i] >= assetData.resolution) {
|
||||
return readFile(record.files[i]);
|
||||
}
|
||||
}
|
||||
|
||||
return readFile(record.files[record.files.length - 1]);
|
||||
});
|
||||
}
|
||||
|
||||
getAssetData(assetPath: string, platform: ?string = null): Promise<{|
|
||||
files: Array<string>,
|
||||
hash: string,
|
||||
name: string,
|
||||
scales: Array<number>,
|
||||
type: string,
|
||||
|}> {
|
||||
const nameData = AssetPaths.parse(
|
||||
assetPath,
|
||||
new Set(platform != null ? [platform] : []),
|
||||
);
|
||||
const {name, type} = nameData;
|
||||
|
||||
return this._getAssetRecord(assetPath, platform).then(record => {
|
||||
const {scales, files} = record;
|
||||
|
||||
const hash = this._hashes.get(assetPath);
|
||||
if (hash != null) {
|
||||
return {files, hash, name, scales, type};
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const hasher = crypto.createHash('md5');
|
||||
hashFiles(files.slice(), hasher, error => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
} else {
|
||||
const freshHash = hasher.digest('hex');
|
||||
this._hashes.set(assetPath, freshHash);
|
||||
files.forEach(f => this._files.set(f, assetPath));
|
||||
resolve({files, hash: freshHash, name, scales, type});
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
onFileChange(type: string, filePath: string) {
|
||||
this._hashes.delete(this._files.get(filePath));
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a request for an image by path. That could contain a resolution
|
||||
* postfix, we need to find that image (or the closest one to it's resolution)
|
||||
* in one of the project roots:
|
||||
*
|
||||
* 1. We first parse the directory of the asset
|
||||
* 2. We check to find a matching directory in one of the project roots
|
||||
* 3. We then build a map of all assets and their scales in this directory
|
||||
* 4. Then try to pick platform-specific asset records
|
||||
* 5. Then pick the closest resolution (rounding up) to the requested one
|
||||
*/
|
||||
_getAssetRecord(assetPath: string, platform: ?string = null): Promise<{|
|
||||
files: Array<string>,
|
||||
scales: Array<number>,
|
||||
|}> {
|
||||
const filename = path.basename(assetPath);
|
||||
|
||||
return (
|
||||
this._findRoot(
|
||||
this._roots,
|
||||
path.dirname(assetPath),
|
||||
assetPath,
|
||||
)
|
||||
.then(dir => Promise.all([
|
||||
dir,
|
||||
readDir(dir),
|
||||
]))
|
||||
.then(res => {
|
||||
const dir = res[0];
|
||||
const files = res[1];
|
||||
const assetData = AssetPaths.parse(
|
||||
filename,
|
||||
new Set(platform != null ? [platform] : []),
|
||||
);
|
||||
|
||||
const map = this._buildAssetMap(dir, files, platform);
|
||||
|
||||
let record;
|
||||
if (platform != null) {
|
||||
record = map.get(getAssetKey(assetData.assetName, platform)) ||
|
||||
map.get(assetData.assetName);
|
||||
} else {
|
||||
record = map.get(assetData.assetName);
|
||||
}
|
||||
|
||||
if (!record) {
|
||||
throw new Error(
|
||||
/* $FlowFixMe: platform can be null */
|
||||
`Asset not found: ${assetPath} for platform: ${platform}`
|
||||
);
|
||||
}
|
||||
|
||||
return record;
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
_findRoot(roots: $ReadOnlyArray<string>, dir: string, debugInfoFile: string): Promise<string> {
|
||||
return Promise.all(
|
||||
roots.map(root => {
|
||||
const absRoot = path.resolve(root);
|
||||
// important: we want to resolve root + dir
|
||||
// to ensure the requested path doesn't traverse beyond root
|
||||
const absPath = path.resolve(root, dir);
|
||||
return stat(absPath).then(fstat => {
|
||||
// keep asset requests from traversing files
|
||||
// up from the root (e.g. ../../../etc/hosts)
|
||||
if (!absPath.startsWith(absRoot)) {
|
||||
return {path: absPath, isValid: false};
|
||||
}
|
||||
return {path: absPath, isValid: fstat.isDirectory()};
|
||||
}, _ => {
|
||||
return {path: absPath, isValid: false};
|
||||
});
|
||||
})
|
||||
).then(stats => {
|
||||
for (let i = 0; i < stats.length; i++) {
|
||||
if (stats[i].isValid) {
|
||||
return stats[i].path;
|
||||
}
|
||||
}
|
||||
|
||||
const rootsString = roots.map(s => `'${s}'`).join(', ');
|
||||
throw new Error(
|
||||
`'${debugInfoFile}' could not be found, because '${dir}' is not a ` +
|
||||
`subdirectory of any of the roots (${rootsString})`,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
_buildAssetMap(dir: string, files: $ReadOnlyArray<string>, platform: ?string): Map<string, {|
|
||||
files: Array<string>,
|
||||
scales: Array<number>,
|
||||
|}> {
|
||||
const platforms = new Set(platform != null ? [platform] : []);
|
||||
const assets = files.map(this._getAssetDataFromName.bind(this, platforms));
|
||||
const map = new Map();
|
||||
assets.forEach(function(asset, i) {
|
||||
if (asset == null) {
|
||||
return;
|
||||
}
|
||||
const file = files[i];
|
||||
const assetKey = getAssetKey(asset.assetName, asset.platform);
|
||||
let record = map.get(assetKey);
|
||||
if (!record) {
|
||||
record = {
|
||||
scales: [],
|
||||
files: [],
|
||||
};
|
||||
map.set(assetKey, record);
|
||||
}
|
||||
|
||||
let insertIndex;
|
||||
const length = record.scales.length;
|
||||
|
||||
for (insertIndex = 0; insertIndex < length; insertIndex++) {
|
||||
if (asset.resolution < record.scales[insertIndex]) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
record.scales.splice(insertIndex, 0, asset.resolution);
|
||||
record.files.splice(insertIndex, 0, path.join(dir, file));
|
||||
});
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
_getAssetDataFromName(platforms: Set<string>, file: string): ?AssetData {
|
||||
return AssetPaths.tryParse(file, platforms);
|
||||
}
|
||||
}
|
||||
|
||||
function getAssetKey(assetName, platform) {
|
||||
if (platform != null) {
|
||||
return `${assetName} : ${platform}`;
|
||||
} else {
|
||||
return assetName;
|
||||
}
|
||||
}
|
||||
|
||||
function hashFiles(files, hash, callback) {
|
||||
if (!files.length) {
|
||||
callback(null);
|
||||
return;
|
||||
}
|
||||
|
||||
fs.createReadStream(files.shift())
|
||||
.on('data', data => hash.update(data))
|
||||
.once('end', () => hashFiles(files, hash, callback))
|
||||
.once('error', error => callback(error));
|
||||
}
|
||||
|
||||
module.exports = AssetServer;
|
|
@@ -1,370 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const BundleBase = require('./BundleBase');
|
||||
const ModuleTransport = require('../lib/ModuleTransport');
|
||||
|
||||
const _ = require('lodash');
|
||||
const crypto = require('crypto');
|
||||
const debug = require('debug')('RNP:Bundle');
|
||||
const invariant = require('fbjs/lib/invariant');
|
||||
|
||||
const {createRamBundleGroups} = require('./util');
|
||||
const {fromRawMappings} = require('./source-map');
|
||||
const {isMappingsMap} = require('../lib/SourceMap');
|
||||
|
||||
import type {IndexMap, MappingsMap, SourceMap} from '../lib/SourceMap';
|
||||
import type {GetSourceOptions, FinalizeOptions} from './BundleBase';
|
||||
|
||||
export type Unbundle = {
|
||||
startupModules: Array<*>,
|
||||
lazyModules: Array<*>,
|
||||
groups: Map<number, Set<number>>,
|
||||
};
|
||||
|
||||
type SourceMapFormat = 'undetermined' | 'indexed' | 'flattened';
|
||||
|
||||
const SOURCEMAPPING_URL = '\n\/\/# sourceMappingURL=';
|
||||
|
||||
class Bundle extends BundleBase {
|
||||
|
||||
_dev: boolean | void;
|
||||
_inlineSourceMap: string | void;
|
||||
_minify: boolean | void;
|
||||
_numRequireCalls: number;
|
||||
_ramBundle: Unbundle | null;
|
||||
_ramGroups: ?Array<string>;
|
||||
_sourceMap: string | null;
|
||||
_sourceMapFormat: SourceMapFormat;
|
||||
_sourceMapUrl: ?string;
|
||||
|
||||
constructor({sourceMapUrl, dev, minify, ramGroups}: {
|
||||
sourceMapUrl: ?string,
|
||||
dev?: boolean,
|
||||
minify?: boolean,
|
||||
ramGroups?: Array<string>,
|
||||
} = {}) {
|
||||
super();
|
||||
this._sourceMap = null;
|
||||
this._sourceMapFormat = 'undetermined';
|
||||
this._sourceMapUrl = sourceMapUrl;
|
||||
this._numRequireCalls = 0;
|
||||
this._dev = dev;
|
||||
this._minify = minify;
|
||||
|
||||
this._ramGroups = ramGroups;
|
||||
this._ramBundle = null; // cached RAM Bundle
|
||||
}
|
||||
|
||||
addModule(
|
||||
/**
|
||||
* $FlowFixMe: this code is inherently incorrect, because it modifies the
|
||||
* signature of the base class function "addModule". That means callsites
|
||||
* using an instance typed as the base class would be broken. This must be
|
||||
* refactored.
|
||||
*/
|
||||
resolver: {wrapModule: (options: any) => Promise<{code: any, map: any}>},
|
||||
resolutionResponse: mixed,
|
||||
module: mixed,
|
||||
/* $FlowFixMe: erroneous change of signature. */
|
||||
moduleTransport: ModuleTransport,
|
||||
/* $FlowFixMe: erroneous change of signature. */
|
||||
): Promise<void> {
|
||||
const index = super.addModule(moduleTransport);
|
||||
return resolver.wrapModule({
|
||||
resolutionResponse,
|
||||
module,
|
||||
name: moduleTransport.name,
|
||||
code: moduleTransport.code,
|
||||
map: moduleTransport.map,
|
||||
meta: moduleTransport.meta,
|
||||
minify: this._minify,
|
||||
dev: this._dev,
|
||||
}).then(({code, map}) => {
|
||||
// If we get a map from the transformer we'll switch to a mode
|
||||
// were we're combining the source maps as opposed to
|
||||
if (map) {
|
||||
const usesRawMappings = isRawMappings(map);
|
||||
|
||||
if (this._sourceMapFormat === 'undetermined') {
|
||||
this._sourceMapFormat = usesRawMappings ? 'flattened' : 'indexed';
|
||||
} else if (usesRawMappings && this._sourceMapFormat === 'indexed') {
|
||||
throw new Error(
|
||||
`Got at least one module with a full source map, but ${
|
||||
moduleTransport.sourcePath} has raw mappings`
|
||||
);
|
||||
} else if (!usesRawMappings && this._sourceMapFormat === 'flattened') {
|
||||
throw new Error(
|
||||
`Got at least one module with raw mappings, but ${
|
||||
moduleTransport.sourcePath} has a full source map`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
this.replaceModuleAt(
|
||||
index, new ModuleTransport({...moduleTransport, code, map}));
|
||||
});
|
||||
}
|
||||
|
||||
finalize(options: FinalizeOptions) {
|
||||
options = options || {};
|
||||
if (options.runModule) {
|
||||
/* $FlowFixMe: this is unsound, as nothing enforces runBeforeMainModule
|
||||
* to be available if `runModule` is true. Refactor. */
|
||||
options.runBeforeMainModule.forEach(this._addRequireCall, this);
|
||||
/* $FlowFixMe: this is unsound, as nothing enforces the module ID to have
|
||||
* been set beforehand. */
|
||||
this._addRequireCall(this.getMainModuleId());
|
||||
}
|
||||
|
||||
super.finalize(options);
|
||||
}
|
||||
|
||||
_addRequireCall(moduleId: string) {
|
||||
const code = `;require(${JSON.stringify(moduleId)});`;
|
||||
const name = 'require-' + moduleId;
|
||||
super.addModule(new ModuleTransport({
|
||||
name,
|
||||
id: -this._numRequireCalls - 1,
|
||||
code,
|
||||
virtual: true,
|
||||
sourceCode: code,
|
||||
sourcePath: name + '.js',
|
||||
meta: {preloaded: true},
|
||||
}));
|
||||
this._numRequireCalls += 1;
|
||||
}
|
||||
|
||||
_getInlineSourceMap(dev: ?boolean) {
|
||||
if (this._inlineSourceMap == null) {
|
||||
const sourceMap = this.getSourceMapString({excludeSource: true, dev});
|
||||
/*eslint-env node*/
|
||||
const encoded = new Buffer(sourceMap).toString('base64');
|
||||
this._inlineSourceMap = 'data:application/json;base64,' + encoded;
|
||||
}
|
||||
return this._inlineSourceMap;
|
||||
}
|
||||
|
||||
getSource(options: GetSourceOptions) {
|
||||
this.assertFinalized();
|
||||
|
||||
options = options || {};
|
||||
|
||||
let source = super.getSource(options);
|
||||
|
||||
if (options.inlineSourceMap) {
|
||||
source += SOURCEMAPPING_URL + this._getInlineSourceMap(options.dev);
|
||||
} else if (this._sourceMapUrl) {
|
||||
source += SOURCEMAPPING_URL + this._sourceMapUrl;
|
||||
}
|
||||
|
||||
return source;
|
||||
}
|
||||
|
||||
getUnbundle(): Unbundle {
|
||||
this.assertFinalized();
|
||||
if (!this._ramBundle) {
|
||||
const modules = this.getModules().slice();
|
||||
|
||||
// separate modules we need to preload from the ones we don't
|
||||
const [startupModules, lazyModules] = partition(modules, shouldPreload);
|
||||
|
||||
const ramGroups = this._ramGroups;
|
||||
let groups;
|
||||
this._ramBundle = {
|
||||
startupModules,
|
||||
lazyModules,
|
||||
get groups() {
|
||||
if (!groups) {
|
||||
groups = createRamBundleGroups(ramGroups || [], lazyModules, subtree);
|
||||
}
|
||||
return groups;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return this._ramBundle;
|
||||
}
|
||||
|
||||
invalidateSource() {
|
||||
debug('invalidating bundle');
|
||||
super.invalidateSource();
|
||||
this._sourceMap = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Combine each of the sourcemaps multiple modules have into a single big
|
||||
* one. This works well thanks to a neat trick defined on the sourcemap spec
|
||||
* that makes use of of the `sections` field to combine sourcemaps by adding
|
||||
* an offset. This is supported only by Chrome for now.
|
||||
*/
|
||||
_getCombinedSourceMaps(options: {excludeSource?: boolean}): IndexMap {
|
||||
const result = {
|
||||
version: 3,
|
||||
file: this._getSourceMapFile(),
|
||||
sections: [],
|
||||
};
|
||||
|
||||
let line = 0;
|
||||
this.getModules().forEach(module => {
|
||||
invariant(
|
||||
!Array.isArray(module.map),
|
||||
`Unexpected raw mappings for ${module.sourcePath}`,
|
||||
);
|
||||
let map: SourceMap = module.map == null || module.virtual
|
||||
? generateSourceMapForVirtualModule(module)
|
||||
: module.map;
|
||||
|
||||
|
||||
if (options.excludeSource && isMappingsMap(map)) {
|
||||
map = {...map, sourcesContent: []};
|
||||
}
|
||||
|
||||
result.sections.push({
|
||||
offset: {line, column: 0},
|
||||
map,
|
||||
});
|
||||
line += module.code.split('\n').length;
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
getSourceMap(options: {excludeSource?: boolean}): SourceMap {
|
||||
this.assertFinalized();
|
||||
|
||||
return this._sourceMapFormat === 'indexed'
|
||||
? this._getCombinedSourceMaps(options)
|
||||
: fromRawMappings(this.getModules()).toMap();
|
||||
}
|
||||
|
||||
getSourceMapString(options: {excludeSource?: boolean}): string {
|
||||
if (this._sourceMapFormat === 'indexed') {
|
||||
return JSON.stringify(this.getSourceMap(options));
|
||||
}
|
||||
|
||||
// The following code is an optimization specific to the development server:
|
||||
// 1. generator.toSource() is faster than JSON.stringify(generator.toMap()).
|
||||
// 2. caching the source map unless there are changes saves time in
|
||||
// development settings.
|
||||
let map = this._sourceMap;
|
||||
if (map == null) {
|
||||
debug('Start building flat source map');
|
||||
map = this._sourceMap = fromRawMappings(this.getModules()).toString();
|
||||
debug('End building flat source map');
|
||||
} else {
|
||||
debug('Returning cached source map');
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
getEtag() {
|
||||
/* $FlowFixMe: we must pass options, or rename the
|
||||
* base `getSource` function, as it does not actually need options. */
|
||||
var eTag = crypto.createHash('md5').update(this.getSource()).digest('hex');
|
||||
return eTag;
|
||||
}
|
||||
|
||||
_getSourceMapFile() {
|
||||
return this._sourceMapUrl
|
||||
? this._sourceMapUrl.replace('.map', '.bundle')
|
||||
: 'bundle.js';
|
||||
}
|
||||
|
||||
getJSModulePaths() {
|
||||
return this.getModules()
|
||||
// Filter out non-js files. Like images etc.
|
||||
.filter(module => !module.virtual)
|
||||
.map(module => module.sourcePath);
|
||||
}
|
||||
|
||||
getDebugInfo() {
|
||||
return [
|
||||
/* $FlowFixMe: this is unsound as the module ID could be unset. */
|
||||
'<div><h3>Main Module:</h3> ' + this.getMainModuleId() + '</div>',
|
||||
'<style>',
|
||||
'pre.collapsed {',
|
||||
' height: 10px;',
|
||||
' width: 100px;',
|
||||
' display: block;',
|
||||
' text-overflow: ellipsis;',
|
||||
' overflow: hidden;',
|
||||
' cursor: pointer;',
|
||||
'}',
|
||||
'</style>',
|
||||
'<h3> Module paths and transformed code: </h3>',
|
||||
this.getModules().map(function(m) {
|
||||
return '<div> <h4> Path: </h4>' + m.sourcePath + '<br/> <h4> Source: </h4>' +
|
||||
'<code><pre class="collapsed" onclick="this.classList.remove(\'collapsed\')">' +
|
||||
_.escape(m.code) + '</pre></code></div>';
|
||||
}).join('\n'),
|
||||
].join('\n');
|
||||
}
|
||||
|
||||
setRamGroups(ramGroups: ?Array<string>) {
|
||||
this._ramGroups = ramGroups;
|
||||
}
|
||||
}
|
||||
|
||||
function generateSourceMapForVirtualModule(module): MappingsMap {
|
||||
// All lines map 1-to-1
|
||||
let mappings = 'AAAA;';
|
||||
|
||||
for (let i = 1; i < module.code.split('\n').length; i++) {
|
||||
mappings += 'AACA;';
|
||||
}
|
||||
|
||||
return {
|
||||
version: 3,
|
||||
sources: [module.sourcePath],
|
||||
names: [],
|
||||
mappings,
|
||||
file: module.sourcePath,
|
||||
sourcesContent: [module.sourceCode],
|
||||
};
|
||||
}
|
||||
|
||||
function shouldPreload({meta}) {
|
||||
return meta && meta.preloaded;
|
||||
}
|
||||
|
||||
function partition(array, predicate) {
|
||||
const included = [];
|
||||
const excluded = [];
|
||||
array.forEach(item => (predicate(item) ? included : excluded).push(item));
|
||||
return [included, excluded];
|
||||
}
|
||||
|
||||
function * subtree(
|
||||
moduleTransport: ModuleTransport,
|
||||
moduleTransportsByPath: Map<string, ModuleTransport>,
|
||||
seen = new Set(),
|
||||
) {
|
||||
seen.add(moduleTransport.id);
|
||||
const {meta} = moduleTransport;
|
||||
invariant(
|
||||
meta != null,
|
||||
'Unexpected module transport without meta information: ' + moduleTransport.sourcePath,
|
||||
);
|
||||
for (const [, {path}] of meta.dependencyPairs || []) {
|
||||
const dependency = moduleTransportsByPath.get(path);
|
||||
if (dependency && !seen.has(dependency.id)) {
|
||||
yield dependency.id;
|
||||
yield * subtree(dependency, moduleTransportsByPath, seen);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const isRawMappings = Array.isArray;
|
||||
|
||||
module.exports = Bundle;
|
|
@@ -1,114 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const ModuleTransport = require('../lib/ModuleTransport');
|
||||
|
||||
export type FinalizeOptions = {
|
||||
allowUpdates?: boolean,
|
||||
runBeforeMainModule?: Array<string>,
|
||||
runModule?: boolean,
|
||||
};
|
||||
|
||||
export type GetSourceOptions = {
|
||||
inlineSourceMap?: boolean,
|
||||
dev: boolean,
|
||||
};
|
||||
|
||||
class BundleBase {
|
||||
|
||||
_assets: Array<mixed>;
|
||||
_finalized: boolean;
|
||||
_mainModuleId: number | void;
|
||||
_source: ?string;
|
||||
__modules: Array<ModuleTransport>;
|
||||
|
||||
constructor() {
|
||||
this._finalized = false;
|
||||
this.__modules = [];
|
||||
this._assets = [];
|
||||
this._mainModuleId = undefined;
|
||||
}
|
||||
|
||||
isEmpty() {
|
||||
return this.__modules.length === 0 && this._assets.length === 0;
|
||||
}
|
||||
|
||||
getMainModuleId() {
|
||||
return this._mainModuleId;
|
||||
}
|
||||
|
||||
setMainModuleId(moduleId: number) {
|
||||
this._mainModuleId = moduleId;
|
||||
}
|
||||
|
||||
addModule(module: ModuleTransport) {
|
||||
if (!(module instanceof ModuleTransport)) {
|
||||
throw new Error('Expected a ModuleTransport object');
|
||||
}
|
||||
|
||||
return this.__modules.push(module) - 1;
|
||||
}
|
||||
|
||||
replaceModuleAt(index: number, module: ModuleTransport) {
|
||||
if (!(module instanceof ModuleTransport)) {
|
||||
throw new Error('Expeceted a ModuleTransport object');
|
||||
}
|
||||
|
||||
this.__modules[index] = module;
|
||||
}
|
||||
|
||||
getModules() {
|
||||
return this.__modules;
|
||||
}
|
||||
|
||||
getAssets() {
|
||||
return this._assets;
|
||||
}
|
||||
|
||||
addAsset(asset: mixed) {
|
||||
this._assets.push(asset);
|
||||
}
|
||||
|
||||
finalize(options: FinalizeOptions) {
|
||||
if (!options.allowUpdates) {
|
||||
Object.freeze(this.__modules);
|
||||
Object.freeze(this._assets);
|
||||
}
|
||||
|
||||
this._finalized = true;
|
||||
}
|
||||
|
||||
getSource(options: GetSourceOptions) {
|
||||
this.assertFinalized();
|
||||
|
||||
if (this._source) {
|
||||
return this._source;
|
||||
}
|
||||
|
||||
this._source = this.__modules.map(module => module.code).join('\n');
|
||||
return this._source;
|
||||
}
|
||||
|
||||
invalidateSource() {
|
||||
this._source = null;
|
||||
}
|
||||
|
||||
assertFinalized(message?: string) {
|
||||
if (!this._finalized) {
|
||||
throw new Error(message || 'Bundle needs to be finalized before getting any source');
|
||||
}
|
||||
}
|
||||
|
||||
setRamGroups(ramGroups: Array<string>) {}
|
||||
}
|
||||
|
||||
module.exports = BundleBase;
|
|
@@ -1,88 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
* @format
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const BundleBase = require('./BundleBase');
|
||||
const ModuleTransport = require('../lib/ModuleTransport');
|
||||
|
||||
import type Resolver from '../Resolver';
|
||||
import type ResolutionResponse
|
||||
from '../node-haste/DependencyGraph/ResolutionResponse';
|
||||
import type Module from '../node-haste/Module';
|
||||
|
||||
class HMRBundle extends BundleBase {
|
||||
_sourceMappingURLFn: (hmrpath: string) => mixed;
|
||||
_sourceMappingURLs: Array<mixed>;
|
||||
_sourceURLFn: (hmrpath: string) => mixed;
|
||||
_sourceURLs: Array<mixed>;
|
||||
|
||||
constructor({
|
||||
sourceURLFn,
|
||||
sourceMappingURLFn,
|
||||
}: {
|
||||
sourceURLFn: (hmrpath: string) => mixed,
|
||||
sourceMappingURLFn: (hmrpath: string) => mixed,
|
||||
}) {
|
||||
super();
|
||||
this._sourceURLFn = sourceURLFn;
|
||||
this._sourceMappingURLFn = sourceMappingURLFn;
|
||||
this._sourceURLs = [];
|
||||
this._sourceMappingURLs = [];
|
||||
}
|
||||
|
||||
addModule(
|
||||
/* $FlowFixMe: broken OOP design: function signature should be the same */
|
||||
resolver: Resolver,
|
||||
/* $FlowFixMe: broken OOP design: function signature should be the same */
|
||||
response: ResolutionResponse<Module, {}>,
|
||||
/* $FlowFixMe: broken OOP design: function signature should be the same */
|
||||
module: Module,
|
||||
/* $FlowFixMe: broken OOP design: function signature should be the same */
|
||||
moduleTransport: ModuleTransport,
|
||||
) {
|
||||
const code = resolver.resolveRequires(
|
||||
response,
|
||||
module,
|
||||
moduleTransport.code,
|
||||
/* $FlowFixMe: may not exist */
|
||||
moduleTransport.meta.dependencyOffsets,
|
||||
);
|
||||
|
||||
super.addModule(new ModuleTransport({...moduleTransport, code}));
|
||||
this._sourceMappingURLs.push(
|
||||
this._sourceMappingURLFn(moduleTransport.sourcePath),
|
||||
);
|
||||
this._sourceURLs.push(this._sourceURLFn(moduleTransport.sourcePath));
|
||||
// inconsistent with parent class return type
|
||||
return (Promise.resolve(): any);
|
||||
}
|
||||
|
||||
getModulesIdsAndCode(): Array<{id: string, code: string}> {
|
||||
return this.__modules.map(module => {
|
||||
return {
|
||||
id: JSON.stringify(module.id),
|
||||
code: module.code,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
getSourceURLs() {
|
||||
return this._sourceURLs;
|
||||
}
|
||||
|
||||
getSourceMappingURLs() {
|
||||
return this._sourceMappingURLs;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = HMRBundle;
|
|
@@ -1,494 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @format
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest.disableAutomock();
|
||||
|
||||
const Bundle = require('../Bundle');
|
||||
const ModuleTransport = require('../../lib/ModuleTransport');
|
||||
const crypto = require('crypto');
|
||||
|
||||
describe('Bundle', () => {
|
||||
var bundle;
|
||||
|
||||
beforeEach(() => {
|
||||
bundle = new Bundle({sourceMapUrl: 'test_url'});
|
||||
bundle.getSourceMap = jest.fn(() => {
|
||||
return 'test-source-map';
|
||||
});
|
||||
});
|
||||
|
||||
describe('source bundle', () => {
|
||||
it('should create a bundle and get the source', () => {
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
return addModule({
|
||||
bundle,
|
||||
code: 'transformed foo;',
|
||||
sourceCode: 'source foo',
|
||||
sourcePath: 'foo path',
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
return addModule({
|
||||
bundle,
|
||||
code: 'transformed bar;',
|
||||
sourceCode: 'source bar',
|
||||
sourcePath: 'bar path',
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
bundle.finalize({});
|
||||
expect(bundle.getSource({dev: true})).toBe(
|
||||
[
|
||||
'transformed foo;',
|
||||
'transformed bar;',
|
||||
'\/\/# sourceMappingURL=test_url',
|
||||
].join('\n'),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('should be ok to leave out the source map url', () => {
|
||||
const otherBundle = new Bundle();
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
return addModule({
|
||||
bundle: otherBundle,
|
||||
code: 'transformed foo;',
|
||||
sourceCode: 'source foo',
|
||||
sourcePath: 'foo path',
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
return addModule({
|
||||
bundle: otherBundle,
|
||||
code: 'transformed bar;',
|
||||
sourceCode: 'source bar',
|
||||
sourcePath: 'bar path',
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
otherBundle.finalize({});
|
||||
expect(otherBundle.getSource({dev: true})).toBe(
|
||||
['transformed foo;', 'transformed bar;'].join('\n'),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('should create a bundle and add run module code', () => {
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
return addModule({
|
||||
bundle,
|
||||
code: 'transformed foo;',
|
||||
sourceCode: 'source foo',
|
||||
sourcePath: 'foo path',
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
return addModule({
|
||||
bundle,
|
||||
code: 'transformed bar;',
|
||||
sourceCode: 'source bar',
|
||||
sourcePath: 'bar path',
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
bundle.setMainModuleId('foo');
|
||||
bundle.finalize({
|
||||
runBeforeMainModule: ['bar'],
|
||||
runModule: true,
|
||||
});
|
||||
expect(bundle.getSource({dev: true})).toBe(
|
||||
[
|
||||
'transformed foo;',
|
||||
'transformed bar;',
|
||||
';require("bar");',
|
||||
';require("foo");',
|
||||
'\/\/# sourceMappingURL=test_url',
|
||||
].join('\n'),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('inserts modules in a deterministic order, independent of timing of the wrapper process', () => {
|
||||
const moduleTransports = [
|
||||
createModuleTransport({name: 'module1'}),
|
||||
createModuleTransport({name: 'module2'}),
|
||||
createModuleTransport({name: 'module3'}),
|
||||
];
|
||||
|
||||
const resolves = {};
|
||||
const resolver = {
|
||||
wrapModule({name}) {
|
||||
return new Promise(resolve => {
|
||||
resolves[name] = resolve;
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
const promise = Promise.all(
|
||||
moduleTransports.map(m =>
|
||||
bundle.addModule(resolver, null, {isPolyfill: () => false}, m),
|
||||
),
|
||||
).then(() => {
|
||||
expect(bundle.getModules()).toEqual(moduleTransports);
|
||||
});
|
||||
|
||||
resolves.module2({code: ''});
|
||||
resolves.module3({code: ''});
|
||||
resolves.module1({code: ''});
|
||||
|
||||
return promise;
|
||||
});
|
||||
});
|
||||
|
||||
describe('sourcemap bundle', () => {
|
||||
it('should create sourcemap', () => {
|
||||
//TODO: #15357872 add a meaningful test here
|
||||
});
|
||||
|
||||
it('should combine sourcemaps', () => {
|
||||
const otherBundle = new Bundle({sourceMapUrl: 'test_url'});
|
||||
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
return addModule({
|
||||
bundle: otherBundle,
|
||||
code: 'transformed foo;\n',
|
||||
sourceCode: 'source foo',
|
||||
map: {name: 'sourcemap foo'},
|
||||
sourcePath: 'foo path',
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
return addModule({
|
||||
bundle: otherBundle,
|
||||
code: 'transformed bar;\n',
|
||||
sourceCode: 'source bar',
|
||||
map: {name: 'sourcemap bar'},
|
||||
sourcePath: 'bar path',
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
return addModule({
|
||||
bundle: otherBundle,
|
||||
code: 'image module;\nimage module;',
|
||||
virtual: true,
|
||||
sourceCode: 'image module;\nimage module;',
|
||||
sourcePath: 'image.png',
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
otherBundle.setMainModuleId('foo');
|
||||
otherBundle.finalize({
|
||||
runBeforeMainModule: ['InitializeCore'],
|
||||
runModule: true,
|
||||
});
|
||||
|
||||
const sourceMap = otherBundle.getSourceMap({dev: true});
|
||||
expect(sourceMap).toEqual({
|
||||
file: 'test_url',
|
||||
version: 3,
|
||||
sections: [
|
||||
{offset: {line: 0, column: 0}, map: {name: 'sourcemap foo'}},
|
||||
{offset: {line: 2, column: 0}, map: {name: 'sourcemap bar'}},
|
||||
{
|
||||
offset: {
|
||||
column: 0,
|
||||
line: 4,
|
||||
},
|
||||
map: {
|
||||
file: 'image.png',
|
||||
mappings: 'AAAA;AACA;',
|
||||
names: [],
|
||||
sources: ['image.png'],
|
||||
sourcesContent: ['image module;\nimage module;'],
|
||||
version: 3,
|
||||
},
|
||||
},
|
||||
{
|
||||
offset: {
|
||||
column: 0,
|
||||
line: 6,
|
||||
},
|
||||
map: {
|
||||
file: 'require-InitializeCore.js',
|
||||
mappings: 'AAAA;',
|
||||
names: [],
|
||||
sources: ['require-InitializeCore.js'],
|
||||
sourcesContent: [';require("InitializeCore");'],
|
||||
version: 3,
|
||||
},
|
||||
},
|
||||
{
|
||||
offset: {
|
||||
column: 0,
|
||||
line: 7,
|
||||
},
|
||||
map: {
|
||||
file: 'require-foo.js',
|
||||
mappings: 'AAAA;',
|
||||
names: [],
|
||||
sources: ['require-foo.js'],
|
||||
sourcesContent: [';require("foo");'],
|
||||
version: 3,
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('getAssets()', () => {
|
||||
it('should save and return asset objects', () => {
|
||||
var p = new Bundle({sourceMapUrl: 'test_url'});
|
||||
var asset1 = {};
|
||||
var asset2 = {};
|
||||
p.addAsset(asset1);
|
||||
p.addAsset(asset2);
|
||||
p.finalize();
|
||||
expect(p.getAssets()).toEqual([asset1, asset2]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getJSModulePaths()', () => {
|
||||
it('should return module paths', () => {
|
||||
var otherBundle = new Bundle({sourceMapUrl: 'test_url'});
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
return addModule({
|
||||
bundle: otherBundle,
|
||||
code: 'transformed foo;\n',
|
||||
sourceCode: 'source foo',
|
||||
sourcePath: 'foo path',
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
return addModule({
|
||||
bundle: otherBundle,
|
||||
code: 'image module;\nimage module;',
|
||||
virtual: true,
|
||||
sourceCode: 'image module;\nimage module;',
|
||||
sourcePath: 'image.png',
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
expect(otherBundle.getJSModulePaths()).toEqual(['foo path']);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('getEtag()', function() {
|
||||
it('should return an etag', function() {
|
||||
bundle.finalize({});
|
||||
var eTag = crypto
|
||||
.createHash('md5')
|
||||
.update(bundle.getSource())
|
||||
.digest('hex');
|
||||
expect(bundle.getEtag()).toEqual(eTag);
|
||||
});
|
||||
});
|
||||
|
||||
describe('main module id:', function() {
|
||||
it('can save a main module ID', function() {
|
||||
const id = 'arbitrary module ID';
|
||||
bundle.setMainModuleId(id);
|
||||
expect(bundle.getMainModuleId()).toEqual(id);
|
||||
});
|
||||
});
|
||||
|
||||
describe('random access bundle groups:', () => {
|
||||
let moduleTransports;
|
||||
beforeEach(() => {
|
||||
moduleTransports = [
|
||||
transport('Product1', ['React', 'Relay']),
|
||||
transport('React', ['ReactFoo', 'ReactBar']),
|
||||
transport('ReactFoo', ['invariant']),
|
||||
transport('invariant', []),
|
||||
transport('ReactBar', ['cx']),
|
||||
transport('cx', []),
|
||||
transport('OtherFramework', ['OtherFrameworkFoo', 'OtherFrameworkBar']),
|
||||
transport('OtherFrameworkFoo', ['invariant']),
|
||||
transport('OtherFrameworkBar', ['crc32']),
|
||||
transport('crc32', ['OtherFrameworkBar']),
|
||||
];
|
||||
});
|
||||
|
||||
it('can create a single group', () => {
|
||||
bundle = createBundle([fsLocation('React')]);
|
||||
const {groups} = bundle.getUnbundle();
|
||||
expect(groups).toEqual(
|
||||
new Map([
|
||||
[
|
||||
idFor('React'),
|
||||
new Set(['ReactFoo', 'invariant', 'ReactBar', 'cx'].map(idFor)),
|
||||
],
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('can create two groups', () => {
|
||||
bundle = createBundle([fsLocation('ReactFoo'), fsLocation('ReactBar')]);
|
||||
const {groups} = bundle.getUnbundle();
|
||||
expect(groups).toEqual(
|
||||
new Map([
|
||||
[idFor('ReactFoo'), new Set([idFor('invariant')])],
|
||||
[idFor('ReactBar'), new Set([idFor('cx')])],
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('can handle circular dependencies', () => {
|
||||
bundle = createBundle([fsLocation('OtherFramework')]);
|
||||
const {groups} = bundle.getUnbundle();
|
||||
expect(groups).toEqual(
|
||||
new Map([
|
||||
[
|
||||
idFor('OtherFramework'),
|
||||
new Set(
|
||||
[
|
||||
'OtherFrameworkFoo',
|
||||
'invariant',
|
||||
'OtherFrameworkBar',
|
||||
'crc32',
|
||||
].map(idFor),
|
||||
),
|
||||
],
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('omits modules that are contained by more than one group', () => {
|
||||
bundle = createBundle([
|
||||
fsLocation('React'),
|
||||
fsLocation('OtherFramework'),
|
||||
]);
|
||||
expect(() => {
|
||||
const {groups} = bundle.getUnbundle(); //eslint-disable-line no-unused-vars
|
||||
}).toThrow(
|
||||
new Error(
|
||||
`Module ${fsLocation('invariant')} belongs to groups ${fsLocation('React')}` +
|
||||
`, and ${fsLocation('OtherFramework')}. Ensure that each module is only part of one group.`,
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
it('ignores missing dependencies', () => {
|
||||
bundle = createBundle([fsLocation('Product1')]);
|
||||
const {groups} = bundle.getUnbundle();
|
||||
expect(groups).toEqual(
|
||||
new Map([
|
||||
[
|
||||
idFor('Product1'),
|
||||
new Set(
|
||||
['React', 'ReactFoo', 'invariant', 'ReactBar', 'cx'].map(idFor),
|
||||
),
|
||||
],
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('throws for group roots that do not exist', () => {
|
||||
bundle = createBundle([fsLocation('DoesNotExist')]);
|
||||
expect(() => {
|
||||
const {groups} = bundle.getUnbundle(); //eslint-disable-line no-unused-vars
|
||||
}).toThrow(
|
||||
new Error(
|
||||
`Group root ${fsLocation('DoesNotExist')} is not part of the bundle`,
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
function idFor(name) {
|
||||
const {map} = idFor;
|
||||
if (!map) {
|
||||
idFor.map = new Map([[name, 0]]);
|
||||
idFor.next = 1;
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (map.has(name)) {
|
||||
return map.get(name);
|
||||
}
|
||||
|
||||
const id = idFor.next++;
|
||||
map.set(name, id);
|
||||
return id;
|
||||
}
|
||||
function createBundle(ramGroups, options = {}) {
|
||||
const b = new Bundle(Object.assign(options, {ramGroups}));
|
||||
moduleTransports.forEach(t => addModule({bundle: b, ...t}));
|
||||
b.finalize();
|
||||
return b;
|
||||
}
|
||||
function fsLocation(name) {
|
||||
return `/fs/${name}.js`;
|
||||
}
|
||||
function module(name) {
|
||||
return {path: fsLocation(name)};
|
||||
}
|
||||
function transport(name, deps) {
|
||||
return createModuleTransport({
|
||||
name,
|
||||
id: idFor(name),
|
||||
sourcePath: fsLocation(name),
|
||||
meta: {dependencyPairs: deps.map(d => [d, module(d)])},
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
function resolverFor(code, map) {
|
||||
return {
|
||||
wrapModule: () => Promise.resolve({code, map}),
|
||||
};
|
||||
}
|
||||
|
||||
function addModule({
|
||||
bundle,
|
||||
code,
|
||||
sourceCode,
|
||||
sourcePath,
|
||||
map,
|
||||
virtual,
|
||||
polyfill,
|
||||
meta,
|
||||
id = '',
|
||||
}) {
|
||||
return bundle.addModule(
|
||||
resolverFor(code, map),
|
||||
null,
|
||||
{isPolyfill: () => polyfill},
|
||||
createModuleTransport({
|
||||
code,
|
||||
sourceCode,
|
||||
sourcePath,
|
||||
id,
|
||||
map,
|
||||
meta,
|
||||
virtual,
|
||||
polyfill,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
function createModuleTransport(data) {
|
||||
return new ModuleTransport({
|
||||
code: '',
|
||||
sourceCode: '',
|
||||
sourcePath: '',
|
||||
id: 'id' in data ? data.id : '',
|
||||
...data,
|
||||
});
|
||||
}
|
|
@@ -1,460 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest.disableAutomock();
|
||||
|
||||
jest
|
||||
.setMock('worker-farm', () => () => undefined)
|
||||
.setMock('../../worker-farm', () => () => undefined)
|
||||
.setMock('uglify-js')
|
||||
.mock('image-size')
|
||||
.mock('fs')
|
||||
.mock('os')
|
||||
.mock('assert')
|
||||
.mock('progress')
|
||||
.mock('../../node-haste/DependencyGraph')
|
||||
.mock('../../JSTransformer')
|
||||
.mock('../../Resolver')
|
||||
.mock('../Bundle')
|
||||
.mock('../HMRBundle')
|
||||
.mock('../../Logger')
|
||||
.mock('/path/to/transformer.js', () => ({}), {virtual: true})
|
||||
;
|
||||
|
||||
var Bundler = require('../');
|
||||
var Resolver = require('../../Resolver');
|
||||
var defaults = require('../../defaults');
|
||||
var sizeOf = require('image-size');
|
||||
var fs = require('fs');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
|
||||
const {any, objectContaining} = expect;
|
||||
|
||||
|
||||
var commonOptions = {
|
||||
allowBundleUpdates: false,
|
||||
assetExts: defaults.assetExts,
|
||||
cacheVersion: 'smth',
|
||||
extraNodeModules: {},
|
||||
platforms: defaults.platforms,
|
||||
resetCache: false,
|
||||
sourceExts: defaults.sourceExts,
|
||||
transformModulePath: '/path/to/transformer.js',
|
||||
watch: false,
|
||||
};
|
||||
|
||||
describe('Bundler', function() {
|
||||
|
||||
function createModule({
|
||||
path,
|
||||
id,
|
||||
dependencies,
|
||||
isAsset,
|
||||
isJSON,
|
||||
isPolyfill,
|
||||
resolution,
|
||||
}) {
|
||||
return {
|
||||
path,
|
||||
resolution,
|
||||
getDependencies: () => Promise.resolve(dependencies),
|
||||
getName: () => Promise.resolve(id),
|
||||
isJSON: () => isJSON,
|
||||
isAsset: () => isAsset,
|
||||
isPolyfill: () => isPolyfill,
|
||||
read: () => ({
|
||||
code: 'arbitrary',
|
||||
source: 'arbitrary',
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
var getDependencies;
|
||||
var getModuleSystemDependencies;
|
||||
var bundler;
|
||||
var assetServer;
|
||||
var modules;
|
||||
var projectRoots;
|
||||
|
||||
beforeEach(function() {
|
||||
os.cpus.mockReturnValue({length: 1});
|
||||
// local directory on purpose, because it should not actually write
|
||||
// anything to the disk during a unit test!
|
||||
os.tmpDir.mockReturnValue(path.join(__dirname));
|
||||
|
||||
getDependencies = jest.fn();
|
||||
getModuleSystemDependencies = jest.fn();
|
||||
projectRoots = ['/root'];
|
||||
|
||||
Resolver.mockImplementation(function() {
|
||||
return {
|
||||
getDependencies,
|
||||
getModuleSystemDependencies,
|
||||
};
|
||||
});
|
||||
Resolver.load = jest.fn().mockImplementation(opts => Promise.resolve(new Resolver(opts)));
|
||||
|
||||
fs.__setMockFilesystem({
|
||||
'path': {'to': {'transformer.js': ''}},
|
||||
});
|
||||
|
||||
fs.statSync.mockImplementation(function() {
|
||||
return {
|
||||
isDirectory: () => true,
|
||||
};
|
||||
});
|
||||
|
||||
fs.readFile.mockImplementation(function(file, callback) {
|
||||
callback(null, '{"json":true}');
|
||||
});
|
||||
|
||||
assetServer = {
|
||||
getAssetData: jest.fn(),
|
||||
};
|
||||
|
||||
bundler = new Bundler({
|
||||
...commonOptions,
|
||||
projectRoots,
|
||||
assetServer,
|
||||
});
|
||||
|
||||
modules = [
|
||||
createModule({id: 'foo', path: '/root/foo.js', dependencies: []}),
|
||||
createModule({id: 'bar', path: '/root/bar.js', dependencies: []}),
|
||||
createModule({
|
||||
id: 'new_image.png',
|
||||
path: '/root/img/new_image.png',
|
||||
isAsset: true,
|
||||
resolution: 2,
|
||||
dependencies: [],
|
||||
}),
|
||||
createModule({
|
||||
id: 'package/file.json',
|
||||
path: '/root/file.json',
|
||||
isJSON: true,
|
||||
dependencies: [],
|
||||
}),
|
||||
];
|
||||
|
||||
getDependencies.mockImplementation((main, options, transformOptions) =>
|
||||
Promise.resolve({
|
||||
mainModuleId: 'foo',
|
||||
dependencies: modules,
|
||||
options: transformOptions,
|
||||
getModuleId: () => 123,
|
||||
getResolvedDependencyPairs: () => [],
|
||||
})
|
||||
);
|
||||
|
||||
getModuleSystemDependencies.mockImplementation(function() {
|
||||
return [];
|
||||
});
|
||||
|
||||
sizeOf.mockImplementation(function(path, cb) {
|
||||
cb(null, {width: 50, height: 100});
|
||||
});
|
||||
});
|
||||
|
||||
it('gets the list of dependencies from the resolver', function() {
|
||||
const entryFile = '/root/foo.js';
|
||||
return bundler.getDependencies({entryFile, recursive: true}).then(() =>
|
||||
// jest calledWith does not support jasmine.any
|
||||
expect(getDependencies.mock.calls[0].slice(0, -2)).toEqual([
|
||||
'/root/foo.js',
|
||||
{dev: true, platform: undefined, recursive: true},
|
||||
{
|
||||
preloadedModules: undefined,
|
||||
ramGroups: undefined,
|
||||
transformer: {
|
||||
dev: true,
|
||||
minify: false,
|
||||
platform: undefined,
|
||||
transform: {
|
||||
dev: true,
|
||||
generateSourceMaps: false,
|
||||
hot: false,
|
||||
inlineRequires: false,
|
||||
platform: undefined,
|
||||
projectRoot: projectRoots[0],
|
||||
},
|
||||
},
|
||||
},
|
||||
])
|
||||
);
|
||||
});
|
||||
|
||||
it('allows overriding the platforms array', () => {
|
||||
expect(bundler._opts.platforms).toEqual(['ios', 'android', 'windows', 'web']);
|
||||
const b = new Bundler({
|
||||
...commonOptions,
|
||||
projectRoots,
|
||||
assetServer,
|
||||
platforms: ['android', 'vr'],
|
||||
});
|
||||
expect(b._opts.platforms).toEqual(['android', 'vr']);
|
||||
});
|
||||
|
||||
describe('.bundle', () => {
|
||||
const mockAsset = {
|
||||
scales: [1, 2, 3],
|
||||
files: [
|
||||
'/root/img/img.png',
|
||||
'/root/img/img@2x.png',
|
||||
'/root/img/img@3x.png',
|
||||
],
|
||||
hash: 'i am a hash',
|
||||
name: 'img',
|
||||
type: 'png',
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
assetServer.getAssetData
|
||||
.mockImplementation(() => Promise.resolve(mockAsset));
|
||||
});
|
||||
|
||||
it('creates a bundle', function() {
|
||||
return bundler.bundle({
|
||||
entryFile: '/root/foo.js',
|
||||
runBeforeMainModule: [],
|
||||
runModule: true,
|
||||
sourceMapUrl: 'source_map_url',
|
||||
}).then(bundle => {
|
||||
const ithAddedModule = i => bundle.addModule.mock.calls[i][2].path;
|
||||
|
||||
expect(ithAddedModule(0)).toEqual('/root/foo.js');
|
||||
expect(ithAddedModule(1)).toEqual('/root/bar.js');
|
||||
expect(ithAddedModule(2)).toEqual('/root/img/new_image.png');
|
||||
expect(ithAddedModule(3)).toEqual('/root/file.json');
|
||||
|
||||
expect(bundle.finalize.mock.calls[0]).toEqual([{
|
||||
runModule: true,
|
||||
runBeforeMainModule: [],
|
||||
allowUpdates: false,
|
||||
}]);
|
||||
|
||||
expect(bundle.addAsset.mock.calls[0]).toEqual([{
|
||||
__packager_asset: true,
|
||||
fileSystemLocation: '/root/img',
|
||||
httpServerLocation: '/assets/img',
|
||||
width: 50,
|
||||
height: 100,
|
||||
scales: [1, 2, 3],
|
||||
files: [
|
||||
'/root/img/img.png',
|
||||
'/root/img/img@2x.png',
|
||||
'/root/img/img@3x.png',
|
||||
],
|
||||
hash: 'i am a hash',
|
||||
name: 'img',
|
||||
type: 'png',
|
||||
}]);
|
||||
|
||||
// TODO(amasad) This fails with 0 != 5 in OSS
|
||||
//expect(ProgressBar.prototype.tick.mock.calls.length).toEqual(modules.length);
|
||||
});
|
||||
});
|
||||
|
||||
it('loads and runs asset plugins', function() {
|
||||
jest.mock('mockPlugin1', () => {
|
||||
return asset => {
|
||||
asset.extraReverseHash = asset.hash.split('').reverse().join('');
|
||||
return asset;
|
||||
};
|
||||
}, {virtual: true});
|
||||
|
||||
jest.mock('asyncMockPlugin2', () => {
|
||||
return asset => {
|
||||
expect(asset.extraReverseHash).toBeDefined();
|
||||
return new Promise(resolve => {
|
||||
asset.extraPixelCount = asset.width * asset.height;
|
||||
resolve(asset);
|
||||
});
|
||||
};
|
||||
}, {virtual: true});
|
||||
|
||||
return bundler.bundle({
|
||||
entryFile: '/root/foo.js',
|
||||
runBeforeMainModule: [],
|
||||
runModule: true,
|
||||
sourceMapUrl: 'source_map_url',
|
||||
assetPlugins: ['mockPlugin1', 'asyncMockPlugin2'],
|
||||
}).then(bundle => {
|
||||
expect(bundle.addAsset.mock.calls[0]).toEqual([{
|
||||
__packager_asset: true,
|
||||
fileSystemLocation: '/root/img',
|
||||
httpServerLocation: '/assets/img',
|
||||
width: 50,
|
||||
height: 100,
|
||||
scales: [1, 2, 3],
|
||||
files: [
|
||||
'/root/img/img.png',
|
||||
'/root/img/img@2x.png',
|
||||
'/root/img/img@3x.png',
|
||||
],
|
||||
hash: 'i am a hash',
|
||||
name: 'img',
|
||||
type: 'png',
|
||||
extraReverseHash: 'hsah a ma i',
|
||||
extraPixelCount: 5000,
|
||||
}]);
|
||||
});
|
||||
});
|
||||
|
||||
it('calls the module post-processing function', () => {
|
||||
const postProcessModules = jest.fn().mockImplementation((ms, e) => ms);
|
||||
|
||||
const b = new Bundler({
|
||||
...commonOptions,
|
||||
postProcessModules,
|
||||
projectRoots,
|
||||
assetServer,
|
||||
});
|
||||
|
||||
const dev = false;
|
||||
const minify = true;
|
||||
const platform = 'arbitrary';
|
||||
|
||||
const entryFile = '/root/foo.js';
|
||||
return b.bundle({
|
||||
dev,
|
||||
entryFile,
|
||||
minify,
|
||||
platform,
|
||||
runBeforeMainModule: [],
|
||||
runModule: true,
|
||||
sourceMapUrl: 'source_map_url',
|
||||
}).then(() => {
|
||||
expect(postProcessModules)
|
||||
.toBeCalledWith(
|
||||
modules.map(x => objectContaining({
|
||||
name: any(String),
|
||||
id: any(Number),
|
||||
code: any(String),
|
||||
sourceCode: any(String),
|
||||
sourcePath: x.path,
|
||||
meta: any(Object),
|
||||
polyfill: !!x.isPolyfill(),
|
||||
})),
|
||||
entryFile,
|
||||
{dev, minify, platform},
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('respects the order of modules returned by the post-processing function', () => {
|
||||
const postProcessModules = jest.fn().mockImplementation((ms, e) => ms.reverse());
|
||||
|
||||
const b = new Bundler({
|
||||
...commonOptions,
|
||||
postProcessModules,
|
||||
projectRoots,
|
||||
assetServer,
|
||||
});
|
||||
|
||||
const entryFile = '/root/foo.js';
|
||||
return b.bundle({
|
||||
entryFile,
|
||||
runBeforeMainModule: [],
|
||||
runModule: true,
|
||||
sourceMapUrl: 'source_map_url',
|
||||
}).then(bundle => {
|
||||
const ithAddedModule = i => bundle.addModule.mock.calls[i][2].path;
|
||||
|
||||
[
|
||||
'/root/file.json',
|
||||
'/root/img/new_image.png',
|
||||
'/root/bar.js',
|
||||
'/root/foo.js',
|
||||
].forEach((path, ix) => expect(ithAddedModule(ix)).toEqual(path));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('.getOrderedDependencyPaths', () => {
|
||||
beforeEach(() => {
|
||||
assetServer.getAssetData.mockImplementation(function(relPath) {
|
||||
if (relPath === 'img/new_image.png') {
|
||||
return Promise.resolve({
|
||||
scales: [1, 2, 3],
|
||||
files: [
|
||||
'/root/img/new_image.png',
|
||||
'/root/img/new_image@2x.png',
|
||||
'/root/img/new_image@3x.png',
|
||||
],
|
||||
hash: 'i am a hash',
|
||||
name: 'img',
|
||||
type: 'png',
|
||||
});
|
||||
} else if (relPath === 'img/new_image2.png') {
|
||||
return Promise.resolve({
|
||||
scales: [1, 2, 3],
|
||||
files: [
|
||||
'/root/img/new_image2.png',
|
||||
'/root/img/new_image2@2x.png',
|
||||
'/root/img/new_image2@3x.png',
|
||||
],
|
||||
hash: 'i am a hash',
|
||||
name: 'img',
|
||||
type: 'png',
|
||||
});
|
||||
}
|
||||
|
||||
throw new Error('unknown image ' + relPath);
|
||||
});
|
||||
});
|
||||
|
||||
it('should get the concrete list of all dependency files', () => {
|
||||
modules.push(
|
||||
createModule({
|
||||
id: 'new_image2.png',
|
||||
path: '/root/img/new_image2.png',
|
||||
isAsset: true,
|
||||
resolution: 2,
|
||||
dependencies: [],
|
||||
}),
|
||||
);
|
||||
|
||||
return bundler.getOrderedDependencyPaths('/root/foo.js', true)
|
||||
.then(paths => expect(paths).toEqual([
|
||||
'/root/foo.js',
|
||||
'/root/bar.js',
|
||||
'/root/img/new_image.png',
|
||||
'/root/img/new_image@2x.png',
|
||||
'/root/img/new_image@3x.png',
|
||||
'/root/file.json',
|
||||
'/root/img/new_image2.png',
|
||||
'/root/img/new_image2@2x.png',
|
||||
'/root/img/new_image2@3x.png',
|
||||
]));
|
||||
});
|
||||
|
||||
describe('number of workers', () => {
|
||||
beforeEach(() => {
|
||||
delete process.env.REACT_NATIVE_MAX_WORKERS;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
delete process.env.REACT_NATIVE_MAX_WORKERS;
|
||||
});
|
||||
|
||||
it('returns the correct number of workers', () => {
|
||||
os.cpus.mockReturnValue({length: 1});
|
||||
expect(Bundler.getMaxWorkerCount()).toBe(1);
|
||||
os.cpus.mockReturnValue({length: 8});
|
||||
expect(Bundler.getMaxWorkerCount()).toBe(6);
|
||||
os.cpus.mockReturnValue({length: 24});
|
||||
expect(Bundler.getMaxWorkerCount()).toBe(14);
|
||||
process.env.REACT_NATIVE_MAX_WORKERS = 5;
|
||||
expect(Bundler.getMaxWorkerCount()).toBe(5);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,873 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const assert = require('assert');
|
||||
const crypto = require('crypto');
|
||||
const debug = require('debug')('RNP:Bundler');
|
||||
const emptyFunction = require('fbjs/lib/emptyFunction');
|
||||
const fs = require('fs');
|
||||
const Transformer = require('../JSTransformer');
|
||||
const Resolver = require('../Resolver');
|
||||
const Bundle = require('./Bundle');
|
||||
const HMRBundle = require('./HMRBundle');
|
||||
const ModuleTransport = require('../lib/ModuleTransport');
|
||||
const imageSize = require('image-size');
|
||||
const path = require('path');
|
||||
const denodeify = require('denodeify');
|
||||
const defaults = require('../defaults');
|
||||
const os = require('os');
|
||||
const invariant = require('fbjs/lib/invariant');
|
||||
const toLocalPath = require('../node-haste/lib/toLocalPath');
|
||||
|
||||
const {generateAssetTransformResult, isAssetTypeAnImage} = require('./util');
|
||||
|
||||
const {
|
||||
sep: pathSeparator,
|
||||
join: joinPath,
|
||||
dirname: pathDirname,
|
||||
extname,
|
||||
} = require('path');
|
||||
|
||||
const VERSION = require('../../package.json').version;
|
||||
|
||||
import type AssetServer from '../AssetServer';
|
||||
import type Module, {HasteImpl} from '../node-haste/Module';
|
||||
import type ResolutionResponse from '../node-haste/DependencyGraph/ResolutionResponse';
|
||||
import type {MappingsMap} from '../lib/SourceMap';
|
||||
import type {Options as JSTransformerOptions} from '../JSTransformer/worker';
|
||||
import type {Reporter} from '../lib/reporting';
|
||||
import type {TransformCache} from '../lib/TransformCaching';
|
||||
import type {GlobalTransformCache} from '../lib/GlobalTransformCache';
|
||||
|
||||
export type BundlingOptions = {|
|
||||
+preloadedModules: ?{[string]: true} | false,
|
||||
+ramGroups: ?Array<string>,
|
||||
+transformer: JSTransformerOptions,
|
||||
|};
|
||||
|
||||
export type ExtraTransformOptions = {
|
||||
+preloadedModules?: {[path: string]: true} | false,
|
||||
+ramGroups?: Array<string>,
|
||||
+transform?: {+inlineRequires?: {+blacklist: {[string]: true}} | boolean},
|
||||
};
|
||||
|
||||
export type GetTransformOptionsOpts = {|
|
||||
dev: boolean,
|
||||
hot: boolean,
|
||||
platform: ?string,
|
||||
|};
|
||||
|
||||
export type GetTransformOptions = (
|
||||
mainModuleName: string,
|
||||
options: GetTransformOptionsOpts,
|
||||
getDependenciesOf: string => Promise<Array<string>>,
|
||||
) => Promise<ExtraTransformOptions>;
|
||||
|
||||
export type AssetDescriptor = {
|
||||
+__packager_asset: boolean,
|
||||
+httpServerLocation: string,
|
||||
+width: ?number,
|
||||
+height: ?number,
|
||||
+scales: Array<number>,
|
||||
+hash: string,
|
||||
+name: string,
|
||||
+type: string,
|
||||
};
|
||||
|
||||
export type ExtendedAssetDescriptor = AssetDescriptor & {
|
||||
+fileSystemLocation: string,
|
||||
+files: Array<string>,
|
||||
};
|
||||
|
||||
const sizeOf = denodeify(imageSize);
|
||||
|
||||
const {
|
||||
createActionStartEntry,
|
||||
createActionEndEntry,
|
||||
log,
|
||||
} = require('../Logger');
|
||||
|
||||
export type PostProcessModulesOptions = {|
|
||||
dev: boolean,
|
||||
minify: boolean,
|
||||
platform: string,
|
||||
|};
|
||||
|
||||
export type PostProcessModules = (
|
||||
modules: Array<ModuleTransport>,
|
||||
entryFile: string,
|
||||
options: PostProcessModulesOptions,
|
||||
) => Array<ModuleTransport>;
|
||||
|
||||
export type PostMinifyProcess = ({
|
||||
code: string,
|
||||
map: MappingsMap,
|
||||
}) => {code: string, map: MappingsMap};
|
||||
|
||||
type Options = {|
|
||||
+allowBundleUpdates: boolean,
|
||||
+assetExts: Array<string>,
|
||||
+assetServer: AssetServer,
|
||||
+blacklistRE?: RegExp,
|
||||
+cacheVersion: string,
|
||||
+extraNodeModules: {},
|
||||
+getTransformOptions?: GetTransformOptions,
|
||||
+globalTransformCache: ?GlobalTransformCache,
|
||||
+hasteImpl?: HasteImpl,
|
||||
+platforms: Array<string>,
|
||||
+polyfillModuleNames: Array<string>,
|
||||
+postMinifyProcess: PostMinifyProcess,
|
||||
+postProcessModules?: PostProcessModules,
|
||||
+projectRoots: $ReadOnlyArray<string>,
|
||||
+providesModuleNodeModules?: Array<string>,
|
||||
+reporter: Reporter,
|
||||
+resetCache: boolean,
|
||||
+sourceExts: Array<string>,
|
||||
+transformCache: TransformCache,
|
||||
+transformModulePath: string,
|
||||
+transformTimeoutInterval: ?number,
|
||||
+watch: boolean,
|
||||
+workerPath: ?string,
|
||||
|};
|
||||
|
||||
const {hasOwnProperty} = Object;
|
||||
|
||||
class Bundler {
|
||||
|
||||
_opts: Options;
|
||||
_getModuleId: (opts: Module) => number;
|
||||
_transformer: Transformer;
|
||||
_resolverPromise: Promise<Resolver>;
|
||||
_projectRoots: $ReadOnlyArray<string>;
|
||||
_assetServer: AssetServer;
|
||||
_getTransformOptions: void | GetTransformOptions;
|
||||
|
||||
constructor(opts: Options) {
|
||||
this._opts = opts;
|
||||
|
||||
opts.projectRoots.forEach(verifyRootExists);
|
||||
|
||||
const transformModuleStr = fs.readFileSync(opts.transformModulePath);
|
||||
const transformModuleHash =
|
||||
crypto.createHash('sha1').update(transformModuleStr).digest('hex');
|
||||
|
||||
const stableProjectRoots = opts.projectRoots.map(p => {
|
||||
return path.relative(path.join(__dirname, '../../../..'), p);
|
||||
});
|
||||
|
||||
const cacheKeyParts = [
|
||||
'react-packager-cache',
|
||||
VERSION,
|
||||
opts.cacheVersion,
|
||||
stableProjectRoots.join(',').split(pathSeparator).join('-'),
|
||||
transformModuleHash,
|
||||
];
|
||||
|
||||
this._getModuleId = createModuleIdFactory();
|
||||
|
||||
let getCacheKey = (options: mixed) => '';
|
||||
if (opts.transformModulePath) {
|
||||
/* $FlowFixMe: dynamic requires prevent static typing :'( */
|
||||
const transformer = require(opts.transformModulePath);
|
||||
if (typeof transformer.getCacheKey !== 'undefined') {
|
||||
getCacheKey = transformer.getCacheKey;
|
||||
}
|
||||
}
|
||||
|
||||
const transformCacheKey = crypto.createHash('sha1').update(
|
||||
cacheKeyParts.join('$'),
|
||||
).digest('hex');
|
||||
|
||||
debug(`Using transform cache key "${transformCacheKey}"`);
|
||||
|
||||
const maxWorkerCount = Bundler.getMaxWorkerCount();
|
||||
|
||||
this._transformer = new Transformer(
|
||||
opts.transformModulePath,
|
||||
maxWorkerCount,
|
||||
{
|
||||
stdoutChunk: chunk => opts.reporter.update({type: 'worker_stdout_chunk', chunk}),
|
||||
stderrChunk: chunk => opts.reporter.update({type: 'worker_stderr_chunk', chunk}),
|
||||
},
|
||||
opts.workerPath,
|
||||
);
|
||||
|
||||
const getTransformCacheKey = options => {
|
||||
return transformCacheKey + getCacheKey(options);
|
||||
};
|
||||
|
||||
this._resolverPromise = Resolver.load({
|
||||
assetExts: opts.assetExts,
|
||||
blacklistRE: opts.blacklistRE,
|
||||
extraNodeModules: opts.extraNodeModules,
|
||||
getTransformCacheKey,
|
||||
globalTransformCache: opts.globalTransformCache,
|
||||
hasteImpl: opts.hasteImpl,
|
||||
maxWorkerCount,
|
||||
minifyCode: this._transformer.minify,
|
||||
postMinifyProcess: this._opts.postMinifyProcess,
|
||||
platforms: new Set(opts.platforms),
|
||||
polyfillModuleNames: opts.polyfillModuleNames,
|
||||
projectRoots: opts.projectRoots,
|
||||
providesModuleNodeModules:
|
||||
opts.providesModuleNodeModules || defaults.providesModuleNodeModules,
|
||||
reporter: opts.reporter,
|
||||
resetCache: opts.resetCache,
|
||||
sourceExts: opts.sourceExts,
|
||||
transformCode:
|
||||
(module, code, transformCodeOptions) => this._transformer.transformFile(
|
||||
module.path,
|
||||
module.localPath,
|
||||
code,
|
||||
transformCodeOptions,
|
||||
),
|
||||
transformCache: opts.transformCache,
|
||||
watch: opts.watch,
|
||||
});
|
||||
|
||||
this._projectRoots = opts.projectRoots;
|
||||
this._assetServer = opts.assetServer;
|
||||
|
||||
this._getTransformOptions = opts.getTransformOptions;
|
||||
}
|
||||
|
||||
end() {
|
||||
this._transformer.kill();
|
||||
return this._resolverPromise.then(
|
||||
resolver => resolver.getDependencyGraph().getWatcher().end(),
|
||||
);
|
||||
}
|
||||
|
||||
bundle(options: {
|
||||
dev: boolean,
|
||||
minify: boolean,
|
||||
unbundle: boolean,
|
||||
sourceMapUrl: ?string,
|
||||
}): Promise<Bundle> {
|
||||
const {dev, minify, unbundle} = options;
|
||||
return this._resolverPromise.then(
|
||||
resolver => resolver.getModuleSystemDependencies({dev, unbundle}),
|
||||
).then(moduleSystemDeps => this._bundle({
|
||||
...options,
|
||||
bundle: new Bundle({dev, minify, sourceMapUrl: options.sourceMapUrl}),
|
||||
moduleSystemDeps,
|
||||
}));
|
||||
}
|
||||
|
||||
_sourceHMRURL(platform: ?string, hmrpath: string) {
|
||||
return this._hmrURL(
|
||||
'',
|
||||
platform,
|
||||
'bundle',
|
||||
hmrpath,
|
||||
);
|
||||
}
|
||||
|
||||
_sourceMappingHMRURL(platform: ?string, hmrpath: string) {
|
||||
// Chrome expects `sourceURL` when eval'ing code
|
||||
return this._hmrURL(
|
||||
'\/\/# sourceURL=',
|
||||
platform,
|
||||
'map',
|
||||
hmrpath,
|
||||
);
|
||||
}
|
||||
|
||||
_hmrURL(prefix: string, platform: ?string, extensionOverride: string, filePath: string) {
|
||||
const matchingRoot = this._projectRoots.find(root => filePath.startsWith(root));
|
||||
|
||||
if (!matchingRoot) {
|
||||
throw new Error('No matching project root for ' + filePath);
|
||||
}
|
||||
|
||||
// Replaces '\' with '/' for Windows paths.
|
||||
if (pathSeparator === '\\') {
|
||||
filePath = filePath.replace(/\\/g, '/');
|
||||
}
|
||||
|
||||
const extensionStart = filePath.lastIndexOf('.');
|
||||
const resource = filePath.substring(
|
||||
matchingRoot.length,
|
||||
extensionStart !== -1 ? extensionStart : undefined,
|
||||
);
|
||||
|
||||
return (
|
||||
prefix + resource +
|
||||
'.' + extensionOverride + '?' +
|
||||
'platform=' + (platform || '') + '&runModule=false&entryModuleOnly=true&hot=true'
|
||||
);
|
||||
}
|
||||
|
||||
hmrBundle(options: {platform: ?string}, host: string, port: number): Promise<HMRBundle> {
|
||||
return this._bundle({
|
||||
...options,
|
||||
bundle: new HMRBundle({
|
||||
sourceURLFn: this._sourceHMRURL.bind(this, options.platform),
|
||||
sourceMappingURLFn: this._sourceMappingHMRURL.bind(
|
||||
this,
|
||||
options.platform,
|
||||
),
|
||||
}),
|
||||
hot: true,
|
||||
dev: true,
|
||||
});
|
||||
}
|
||||
|
||||
_bundle({
|
||||
assetPlugins,
|
||||
bundle,
|
||||
dev,
|
||||
entryFile,
|
||||
entryModuleOnly,
|
||||
generateSourceMaps,
|
||||
hot,
|
||||
isolateModuleIDs,
|
||||
minify,
|
||||
moduleSystemDeps = [],
|
||||
onProgress,
|
||||
platform,
|
||||
resolutionResponse,
|
||||
runBeforeMainModule,
|
||||
runModule,
|
||||
unbundle,
|
||||
}: {
|
||||
assetPlugins?: Array<string>,
|
||||
bundle: Bundle | HMRBundle,
|
||||
dev: boolean,
|
||||
entryFile?: string,
|
||||
entryModuleOnly?: boolean,
|
||||
generateSourceMaps?: boolean,
|
||||
hot?: boolean,
|
||||
isolateModuleIDs?: boolean,
|
||||
minify?: boolean,
|
||||
moduleSystemDeps?: Array<Module>,
|
||||
onProgress?: () => void,
|
||||
platform?: ?string,
|
||||
resolutionResponse?: ResolutionResponse<Module, BundlingOptions>,
|
||||
runBeforeMainModule?: boolean,
|
||||
runModule?: boolean,
|
||||
unbundle?: boolean,
|
||||
}) {
|
||||
const onResolutionResponse = (response: ResolutionResponse<Module, BundlingOptions>) => {
|
||||
/* $FlowFixMe: looks like ResolutionResponse is monkey-patched
|
||||
* with `getModuleId`. */
|
||||
bundle.setMainModuleId(response.getModuleId(getMainModule(response)));
|
||||
if (entryModuleOnly && entryFile) {
|
||||
response.dependencies = response.dependencies.filter(module =>
|
||||
module.path.endsWith(entryFile || '')
|
||||
);
|
||||
} else {
|
||||
response.dependencies = moduleSystemDeps.concat(response.dependencies);
|
||||
}
|
||||
};
|
||||
const finalizeBundle = ({bundle: finalBundle, transformedModules, response, modulesByName}: {
|
||||
bundle: Bundle,
|
||||
transformedModules: Array<{module: Module, transformed: ModuleTransport}>,
|
||||
response: ResolutionResponse<Module, BundlingOptions>,
|
||||
modulesByName: {[name: string]: Module},
|
||||
}) =>
|
||||
this._resolverPromise.then(resolver => Promise.all(
|
||||
transformedModules.map(({module, transformed}) =>
|
||||
finalBundle.addModule(resolver, response, module, transformed)
|
||||
)
|
||||
)).then(() => {
|
||||
const runBeforeMainModuleIds = Array.isArray(runBeforeMainModule)
|
||||
? runBeforeMainModule
|
||||
.map(name => modulesByName[name])
|
||||
.filter(Boolean)
|
||||
.map(response.getModuleId)
|
||||
: undefined;
|
||||
|
||||
finalBundle.finalize({
|
||||
runModule,
|
||||
runBeforeMainModule: runBeforeMainModuleIds,
|
||||
allowUpdates: this._opts.allowBundleUpdates,
|
||||
});
|
||||
return finalBundle;
|
||||
});
|
||||
|
||||
return this._buildBundle({
|
||||
entryFile,
|
||||
dev,
|
||||
minify,
|
||||
platform,
|
||||
bundle,
|
||||
hot,
|
||||
unbundle,
|
||||
resolutionResponse,
|
||||
onResolutionResponse,
|
||||
finalizeBundle,
|
||||
isolateModuleIDs,
|
||||
generateSourceMaps,
|
||||
assetPlugins,
|
||||
onProgress,
|
||||
});
|
||||
}
|
||||
|
||||
_buildBundle({
|
||||
entryFile,
|
||||
dev,
|
||||
minify,
|
||||
platform,
|
||||
bundle,
|
||||
hot,
|
||||
unbundle,
|
||||
resolutionResponse,
|
||||
isolateModuleIDs,
|
||||
generateSourceMaps,
|
||||
assetPlugins,
|
||||
onResolutionResponse = emptyFunction,
|
||||
onModuleTransformed = emptyFunction,
|
||||
finalizeBundle = emptyFunction,
|
||||
onProgress = emptyFunction,
|
||||
}: *) {
|
||||
const transformingFilesLogEntry =
|
||||
log(createActionStartEntry({
|
||||
action_name: 'Transforming files',
|
||||
entry_point: entryFile,
|
||||
environment: dev ? 'dev' : 'prod',
|
||||
}));
|
||||
|
||||
const modulesByName = Object.create(null);
|
||||
|
||||
if (!resolutionResponse) {
|
||||
resolutionResponse = this.getDependencies({
|
||||
entryFile,
|
||||
dev,
|
||||
platform,
|
||||
hot,
|
||||
onProgress,
|
||||
minify,
|
||||
isolateModuleIDs,
|
||||
generateSourceMaps: unbundle || minify || generateSourceMaps,
|
||||
});
|
||||
}
|
||||
|
||||
return Promise.all(
|
||||
[this._resolverPromise, resolutionResponse],
|
||||
).then(([resolver, response]) => {
|
||||
bundle.setRamGroups(response.options.ramGroups);
|
||||
|
||||
log(createActionEndEntry(transformingFilesLogEntry));
|
||||
onResolutionResponse(response);
|
||||
|
||||
// get entry file complete path (`entryFile` is a local path, i.e. relative to roots)
|
||||
let entryFilePath;
|
||||
if (response.dependencies.length > 1) { // skip HMR requests
|
||||
const numModuleSystemDependencies =
|
||||
resolver.getModuleSystemDependencies({dev, unbundle}).length;
|
||||
|
||||
const dependencyIndex =
|
||||
(response.numPrependedDependencies || 0) + numModuleSystemDependencies;
|
||||
|
||||
if (dependencyIndex in response.dependencies) {
|
||||
entryFilePath = response.dependencies[dependencyIndex].path;
|
||||
}
|
||||
}
|
||||
|
||||
const modulesByTransport: Map<ModuleTransport, Module> = new Map();
|
||||
const toModuleTransport: Module => Promise<ModuleTransport> =
|
||||
module =>
|
||||
this._toModuleTransport({
|
||||
module,
|
||||
bundle,
|
||||
entryFilePath,
|
||||
assetPlugins,
|
||||
options: response.options,
|
||||
/* $FlowFixMe: `getModuleId` is monkey-patched */
|
||||
getModuleId: (response.getModuleId: () => number),
|
||||
dependencyPairs: response.getResolvedDependencyPairs(module),
|
||||
}).then(transformed => {
|
||||
modulesByTransport.set(transformed, module);
|
||||
modulesByName[transformed.name] = module;
|
||||
onModuleTransformed({
|
||||
module,
|
||||
response,
|
||||
bundle,
|
||||
transformed,
|
||||
});
|
||||
return transformed;
|
||||
});
|
||||
|
||||
const p = this._opts.postProcessModules;
|
||||
const postProcess = p
|
||||
? modules => p(modules, entryFile, {dev, minify, platform})
|
||||
: null;
|
||||
|
||||
return Promise.all(response.dependencies.map(toModuleTransport))
|
||||
.then(postProcess)
|
||||
.then(moduleTransports => {
|
||||
const transformedModules = moduleTransports.map(transformed => ({
|
||||
module: modulesByTransport.get(transformed),
|
||||
transformed,
|
||||
}));
|
||||
return finalizeBundle({bundle, transformedModules, response, modulesByName});
|
||||
}).then(() => bundle);
|
||||
});
|
||||
}
|
||||
|
||||
getShallowDependencies({
|
||||
entryFile,
|
||||
platform,
|
||||
dev = true,
|
||||
minify = !dev,
|
||||
hot = false,
|
||||
generateSourceMaps = false,
|
||||
}: {
|
||||
entryFile: string,
|
||||
platform: ?string,
|
||||
dev?: boolean,
|
||||
minify?: boolean,
|
||||
hot?: boolean,
|
||||
generateSourceMaps?: boolean,
|
||||
}): Promise<Array<Module>> {
|
||||
return this.getTransformOptions(
|
||||
entryFile,
|
||||
{
|
||||
dev,
|
||||
generateSourceMaps,
|
||||
hot,
|
||||
minify,
|
||||
platform,
|
||||
projectRoots: this._projectRoots,
|
||||
},
|
||||
).then(bundlingOptions =>
|
||||
this._resolverPromise.then(resolver =>
|
||||
resolver.getShallowDependencies(entryFile, bundlingOptions.transformer),
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
getModuleForPath(entryFile: string): Promise<Module> {
|
||||
return this._resolverPromise.then(resolver => resolver.getModuleForPath(entryFile));
|
||||
}
|
||||
|
||||
async getDependencies({
|
||||
entryFile,
|
||||
platform,
|
||||
dev = true,
|
||||
minify = !dev,
|
||||
hot = false,
|
||||
recursive = true,
|
||||
generateSourceMaps = false,
|
||||
isolateModuleIDs = false,
|
||||
onProgress,
|
||||
}: {
|
||||
entryFile: string,
|
||||
platform: ?string,
|
||||
dev?: boolean,
|
||||
minify?: boolean,
|
||||
hot?: boolean,
|
||||
recursive?: boolean,
|
||||
generateSourceMaps?: boolean,
|
||||
isolateModuleIDs?: boolean,
|
||||
onProgress?: ?(finishedModules: number, totalModules: number) => mixed,
|
||||
}): Promise<ResolutionResponse<Module, BundlingOptions>> {
|
||||
const bundlingOptions: BundlingOptions = await this.getTransformOptions(
|
||||
entryFile,
|
||||
{
|
||||
dev,
|
||||
platform,
|
||||
hot,
|
||||
generateSourceMaps,
|
||||
minify,
|
||||
projectRoots: this._projectRoots,
|
||||
},
|
||||
);
|
||||
|
||||
const resolver = await this._resolverPromise;
|
||||
const response = await resolver.getDependencies(
|
||||
entryFile,
|
||||
{dev, platform, recursive},
|
||||
bundlingOptions,
|
||||
onProgress,
|
||||
isolateModuleIDs ? createModuleIdFactory() : this._getModuleId,
|
||||
);
|
||||
return response;
|
||||
}
|
||||
|
||||
getOrderedDependencyPaths({entryFile, dev, platform, minify, generateSourceMaps}: {
|
||||
+entryFile: string,
|
||||
+dev: boolean,
|
||||
+platform: string,
|
||||
+minify: boolean,
|
||||
+generateSourceMaps: boolean,
|
||||
}) {
|
||||
return this.getDependencies({entryFile, dev, platform, minify, generateSourceMaps}).then(
|
||||
({dependencies}) => {
|
||||
const ret = [];
|
||||
const promises = [];
|
||||
const placeHolder = {};
|
||||
dependencies.forEach(dep => {
|
||||
if (dep.isAsset()) {
|
||||
const localPath = toLocalPath(
|
||||
this._projectRoots,
|
||||
dep.path
|
||||
);
|
||||
promises.push(
|
||||
this._assetServer.getAssetData(localPath, platform)
|
||||
);
|
||||
ret.push(placeHolder);
|
||||
} else {
|
||||
ret.push(dep.path);
|
||||
}
|
||||
});
|
||||
|
||||
return Promise.all(promises).then(assetsData => {
|
||||
assetsData.forEach(({files}) => {
|
||||
const index = ret.indexOf(placeHolder);
|
||||
ret.splice(index, 1, ...files);
|
||||
});
|
||||
return ret;
|
||||
});
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
_toModuleTransport({
|
||||
module,
|
||||
bundle,
|
||||
entryFilePath,
|
||||
options,
|
||||
getModuleId,
|
||||
dependencyPairs,
|
||||
assetPlugins,
|
||||
}: {
|
||||
module: Module,
|
||||
bundle: Bundle,
|
||||
entryFilePath: string,
|
||||
options: BundlingOptions,
|
||||
getModuleId: (module: Module) => number,
|
||||
dependencyPairs: Array<[string, Module]>,
|
||||
assetPlugins: Array<string>,
|
||||
}): Promise<ModuleTransport> {
|
||||
let moduleTransport;
|
||||
const moduleId = getModuleId(module);
|
||||
const transformOptions = options.transformer;
|
||||
|
||||
if (module.isAsset()) {
|
||||
moduleTransport = this._generateAssetModule(
|
||||
bundle, module, moduleId, assetPlugins, transformOptions.platform);
|
||||
}
|
||||
|
||||
if (moduleTransport) {
|
||||
return Promise.resolve(moduleTransport);
|
||||
}
|
||||
|
||||
return Promise.all([
|
||||
module.getName(),
|
||||
module.read(transformOptions),
|
||||
]).then((
|
||||
[name, {code, dependencies, dependencyOffsets, map, source}]
|
||||
) => {
|
||||
const {preloadedModules} = options;
|
||||
const isPolyfill = module.isPolyfill();
|
||||
const preloaded =
|
||||
module.path === entryFilePath ||
|
||||
isPolyfill ||
|
||||
preloadedModules && hasOwnProperty.call(preloadedModules, module.path);
|
||||
|
||||
return new ModuleTransport({
|
||||
name,
|
||||
id: moduleId,
|
||||
code,
|
||||
map,
|
||||
meta: {dependencies, dependencyOffsets, preloaded, dependencyPairs},
|
||||
polyfill: isPolyfill,
|
||||
sourceCode: source,
|
||||
sourcePath: module.path,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
_generateAssetObjAndCode(
|
||||
module: Module,
|
||||
assetPlugins: Array<string>,
|
||||
platform: ?string = null,
|
||||
) {
|
||||
const localPath = toLocalPath(this._projectRoots, module.path);
|
||||
var assetUrlPath = joinPath('/assets', pathDirname(localPath));
|
||||
|
||||
// On Windows, change backslashes to slashes to get proper URL path from file path.
|
||||
if (pathSeparator === '\\') {
|
||||
assetUrlPath = assetUrlPath.replace(/\\/g, '/');
|
||||
}
|
||||
|
||||
const isImage = isAssetTypeAnImage(extname(module.path).slice(1));
|
||||
|
||||
return this._assetServer.getAssetData(localPath, platform).then(assetData => {
|
||||
return Promise.all([isImage ? sizeOf(assetData.files[0]) : null, assetData]);
|
||||
}).then(res => {
|
||||
const dimensions = res[0];
|
||||
const assetData = res[1];
|
||||
const scale = assetData.scales[0];
|
||||
const asset = {
|
||||
__packager_asset: true,
|
||||
fileSystemLocation: pathDirname(module.path),
|
||||
httpServerLocation: assetUrlPath,
|
||||
width: dimensions ? dimensions.width / scale : undefined,
|
||||
height: dimensions ? dimensions.height / scale : undefined,
|
||||
scales: assetData.scales,
|
||||
files: assetData.files,
|
||||
hash: assetData.hash,
|
||||
name: assetData.name,
|
||||
type: assetData.type,
|
||||
};
|
||||
|
||||
return this._applyAssetPlugins(assetPlugins, asset);
|
||||
}).then(asset => {
|
||||
const {code, dependencies, dependencyOffsets} = generateAssetTransformResult(asset);
|
||||
return {
|
||||
asset,
|
||||
code,
|
||||
meta: {dependencies, dependencyOffsets, preloaded: null},
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
_applyAssetPlugins(
|
||||
assetPlugins: Array<string>,
|
||||
asset: ExtendedAssetDescriptor,
|
||||
) {
|
||||
if (!assetPlugins.length) {
|
||||
return asset;
|
||||
}
|
||||
|
||||
const [currentAssetPlugin, ...remainingAssetPlugins] = assetPlugins;
|
||||
/* $FlowFixMe: dynamic requires prevent static typing :'( */
|
||||
const assetPluginFunction = require(currentAssetPlugin);
|
||||
const result = assetPluginFunction(asset);
|
||||
|
||||
// If the plugin was an async function, wait for it to fulfill before
|
||||
// applying the remaining plugins
|
||||
if (typeof result.then === 'function') {
|
||||
return result.then(resultAsset =>
|
||||
this._applyAssetPlugins(remainingAssetPlugins, resultAsset)
|
||||
);
|
||||
} else {
|
||||
return this._applyAssetPlugins(remainingAssetPlugins, result);
|
||||
}
|
||||
}
|
||||
|
||||
_generateAssetModule(
|
||||
bundle: Bundle,
|
||||
module: Module,
|
||||
moduleId: number,
|
||||
assetPlugins: Array<string> = [],
|
||||
platform: ?string = null,
|
||||
) {
|
||||
return Promise.all([
|
||||
module.getName(),
|
||||
this._generateAssetObjAndCode(module, assetPlugins, platform),
|
||||
]).then(([name, {asset, code, meta}]) => {
|
||||
bundle.addAsset(asset);
|
||||
return new ModuleTransport({
|
||||
name,
|
||||
id: moduleId,
|
||||
code,
|
||||
meta,
|
||||
sourceCode: code,
|
||||
sourcePath: module.path,
|
||||
virtual: true,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async getTransformOptions(
|
||||
mainModuleName: string,
|
||||
options: {|
|
||||
dev: boolean,
|
||||
generateSourceMaps: boolean,
|
||||
hot: boolean,
|
||||
minify: boolean,
|
||||
platform: ?string,
|
||||
projectRoots: $ReadOnlyArray<string>,
|
||||
|},
|
||||
): Promise<BundlingOptions> {
|
||||
const getDependencies = (entryFile: string) =>
|
||||
this.getDependencies({...options, entryFile})
|
||||
.then(r => r.dependencies.map(d => d.path));
|
||||
|
||||
const {dev, hot, platform} = options;
|
||||
const extraOptions: ExtraTransformOptions = this._getTransformOptions
|
||||
? await this._getTransformOptions(mainModuleName, {dev, hot, platform}, getDependencies)
|
||||
: {};
|
||||
|
||||
const {transform = {}} = extraOptions;
|
||||
|
||||
return {
|
||||
transformer: {
|
||||
dev,
|
||||
minify: options.minify,
|
||||
platform,
|
||||
transform: {
|
||||
dev,
|
||||
generateSourceMaps: options.generateSourceMaps,
|
||||
hot,
|
||||
inlineRequires: transform.inlineRequires || false,
|
||||
platform,
|
||||
projectRoot: options.projectRoots[0],
|
||||
},
|
||||
},
|
||||
preloadedModules: extraOptions.preloadedModules,
|
||||
ramGroups: extraOptions.ramGroups,
|
||||
};
|
||||
}
|
||||
|
||||
getResolver(): Promise<Resolver> {
|
||||
return this._resolverPromise;
|
||||
}
|
||||
|
||||
/**
|
||||
* Unless overridden, we use a diminishing number of workers per core, because
|
||||
* using more and more of them does not scale much. Ex. 6 workers for 8
|
||||
* cores, or 14 workers for 24 cores.
|
||||
*/
|
||||
static getMaxWorkerCount() {
|
||||
const cores = os.cpus().length;
|
||||
const envStr = process.env.REACT_NATIVE_MAX_WORKERS;
|
||||
if (envStr == null) {
|
||||
return Math.max(1, Math.ceil(cores * (0.5 + 0.5 * Math.exp(-cores * 0.07)) - 1));
|
||||
}
|
||||
const envCount = parseInt(process.env.REACT_NATIVE_MAX_WORKERS, 10);
|
||||
invariant(
|
||||
Number.isInteger(envCount),
|
||||
'environment variable `REACT_NATIVE_MAX_WORKERS` must be a valid integer',
|
||||
);
|
||||
return Math.min(cores, envCount);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function verifyRootExists(root) {
|
||||
// Verify that the root exists.
|
||||
assert(fs.statSync(root).isDirectory(), 'Root has to be a valid directory');
|
||||
}
|
||||
|
||||
function createModuleIdFactory() {
|
||||
const fileToIdMap = Object.create(null);
|
||||
let nextId = 0;
|
||||
return ({path: modulePath}) => {
|
||||
if (!(modulePath in fileToIdMap)) {
|
||||
fileToIdMap[modulePath] = nextId;
|
||||
nextId += 1;
|
||||
}
|
||||
return fileToIdMap[modulePath];
|
||||
};
|
||||
}
|
||||
|
||||
function getMainModule({dependencies, numPrependedDependencies = 0}) {
|
||||
return dependencies[numPrependedDependencies];
|
||||
}
|
||||
|
||||
module.exports = Bundler;
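The `getMaxWorkerCount()` heuristic above tapers the worker count as the number of cores grows. A minimal sketch of that arithmetic (an illustration added here, not part of the diff; it assumes no `REACT_NATIVE_MAX_WORKERS` override) reproduces the figures quoted in its doc comment and asserted in the Bundler test:

// Sketch: the diminishing-workers formula from Bundler.getMaxWorkerCount().
const maxWorkers = cores =>
  Math.max(1, Math.ceil(cores * (0.5 + 0.5 * Math.exp(-cores * 0.07)) - 1));

console.log(maxWorkers(1));  // 1
console.log(maxWorkers(8));  // 6
console.log(maxWorkers(24)); // 14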
|
|
@ -1,108 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const encode = require('./encode');
|
||||
|
||||
const MAX_SEGMENT_LENGTH = 7;
|
||||
const ONE_MEG = 1024 * 1024;
|
||||
const COMMA = 0x2c;
|
||||
const SEMICOLON = 0x3b;
|
||||
|
||||
/**
|
||||
* Efficient builder for base64 VLQ mappings strings.
|
||||
*
|
||||
* This class uses a buffer that is preallocated with one megabyte and is
|
||||
* reallocated dynamically as needed, doubling its size.
|
||||
*
|
||||
* Encoding never creates any complex value types (strings, objects), and only
|
||||
* writes character values to the buffer.
|
||||
*
|
||||
* For details about source map terminology and specification, check
|
||||
* https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit
|
||||
*/
|
||||
class B64Builder {
|
||||
buffer: Buffer;
|
||||
pos: number;
|
||||
hasSegment: boolean;
|
||||
|
||||
constructor() {
|
||||
this.buffer = new Buffer(ONE_MEG);
|
||||
this.pos = 0;
|
||||
this.hasSegment = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds `n` markers for generated lines to the mappings.
|
||||
*/
|
||||
markLines(n: number) {
|
||||
if (n < 1) {
|
||||
return this;
|
||||
}
|
||||
this.hasSegment = false;
|
||||
if (this.pos + n >= this.buffer.length) {
|
||||
this._realloc();
|
||||
}
|
||||
while (n--) {
|
||||
this.buffer[this.pos++] = SEMICOLON;
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts a segment at the specified column offset in the current line.
|
||||
*/
|
||||
startSegment(column: number) {
|
||||
if (this.hasSegment) {
|
||||
this._writeByte(COMMA);
|
||||
} else {
|
||||
this.hasSegment = true;
|
||||
}
|
||||
|
||||
this.append(column);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Appends a single number to the mappings.
|
||||
*/
|
||||
append(value: number) {
|
||||
if (this.pos + MAX_SEGMENT_LENGTH >= this.buffer.length) {
|
||||
this._realloc();
|
||||
}
|
||||
|
||||
this.pos = encode(value, this.buffer, this.pos);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the string representation of the mappings.
|
||||
*/
|
||||
toString() {
|
||||
return this.buffer.toString('ascii', 0, this.pos);
|
||||
}
|
||||
|
||||
_writeByte(byte: number) {
|
||||
if (this.pos === this.buffer.length) {
|
||||
this._realloc();
|
||||
}
|
||||
this.buffer[this.pos++] = byte;
|
||||
}
|
||||
|
||||
_realloc() {
|
||||
const {buffer} = this;
|
||||
this.buffer = new Buffer(buffer.length * 2);
|
||||
buffer.copy(this.buffer);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = B64Builder;
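A minimal usage sketch of the B64Builder API above (illustration only; the `require` path is hypothetical, and the expected output mirrors the assertions in the B64Builder tests later in this diff):

const B64Builder = require('./B64Builder'); // hypothetical path, for illustration
const builder = new B64Builder();
builder.startSegment(0); // first segment of the line, column 0 -> 'A'
builder.markLines(1);    // advance to the next generated line  -> ';'
builder.startSegment(0); // new line, so no comma is emitted    -> 'A'
console.log(builder.toString()); // 'A;A'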
|
|
@ -1,195 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const B64Builder = require('./B64Builder');
|
||||
|
||||
import type {MappingsMap} from '../../lib/SourceMap';
|
||||
|
||||
/**
|
||||
* Generates a source map from raw mappings.
|
||||
*
|
||||
* Raw mappings are a set of 2, 4, or 5 elements:
|
||||
*
|
||||
* - line and column number in the generated source
|
||||
* - line and column number in the original source
|
||||
* - symbol name in the original source
|
||||
*
|
||||
* Mappings have to be passed in the order of appearance in the generated source.
|
||||
*/
|
||||
class Generator {
|
||||
builder: B64Builder;
|
||||
last: {|
|
||||
generatedColumn: number,
|
||||
generatedLine: number,
|
||||
name: number,
|
||||
source: number,
|
||||
sourceColumn: number,
|
||||
sourceLine: number,
|
||||
|};
|
||||
names: IndexedSet;
|
||||
source: number;
|
||||
sources: Array<string>;
|
||||
sourcesContent: Array<?string>;
|
||||
|
||||
constructor() {
|
||||
this.builder = new B64Builder();
|
||||
this.last = {
|
||||
generatedColumn: 0,
|
||||
generatedLine: 1, // lines are passed in 1-indexed
|
||||
name: 0,
|
||||
source: 0,
|
||||
sourceColumn: 0,
|
||||
sourceLine: 1,
|
||||
};
|
||||
this.names = new IndexedSet();
|
||||
this.source = -1;
|
||||
this.sources = [];
|
||||
this.sourcesContent = [];
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark the beginning of a new source file.
|
||||
*/
|
||||
startFile(file: string, code: string) {
|
||||
this.source = this.sources.push(file) - 1;
|
||||
this.sourcesContent.push(code);
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark the end of the current source file
|
||||
*/
|
||||
endFile() {
|
||||
this.source = -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a mapping for generated code without a corresponding source location.
|
||||
*/
|
||||
addSimpleMapping(generatedLine: number, generatedColumn: number): void {
|
||||
const last = this.last;
|
||||
if (this.source === -1 ||
|
||||
generatedLine === last.generatedLine &&
|
||||
generatedColumn < last.generatedColumn ||
|
||||
generatedLine < last.generatedLine) {
|
||||
const msg = this.source === -1
|
||||
? 'Cannot add mapping before starting a file with `startFile()`'
|
||||
: 'Mapping is for a position preceding an earlier mapping';
|
||||
throw new Error(msg);
|
||||
}
|
||||
|
||||
if (generatedLine > last.generatedLine) {
|
||||
this.builder.markLines(generatedLine - last.generatedLine);
|
||||
last.generatedLine = generatedLine;
|
||||
last.generatedColumn = 0;
|
||||
}
|
||||
|
||||
this.builder.startSegment(generatedColumn - last.generatedColumn);
|
||||
last.generatedColumn = generatedColumn;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a mapping for generated code with a corresponding source location.
|
||||
*/
|
||||
addSourceMapping(
|
||||
generatedLine: number,
|
||||
generatedColumn: number,
|
||||
sourceLine: number,
|
||||
sourceColumn: number,
|
||||
): void {
|
||||
this.addSimpleMapping(generatedLine, generatedColumn);
|
||||
|
||||
const last = this.last;
|
||||
this.builder
|
||||
.append(this.source - last.source)
|
||||
.append(sourceLine - last.sourceLine)
|
||||
.append(sourceColumn - last.sourceColumn);
|
||||
|
||||
last.source = this.source;
|
||||
last.sourceColumn = sourceColumn;
|
||||
last.sourceLine = sourceLine;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a mapping for code with a corresponding source location + symbol name.
|
||||
*/
|
||||
addNamedSourceMapping(
|
||||
generatedLine: number,
|
||||
generatedColumn: number,
|
||||
sourceLine: number,
|
||||
sourceColumn: number,
|
||||
name: string,
|
||||
): void {
|
||||
this.addSourceMapping(
|
||||
generatedLine, generatedColumn, sourceLine, sourceColumn);
|
||||
|
||||
const last = this.last;
|
||||
const nameIndex = this.names.indexFor(name);
|
||||
this.builder.append(nameIndex - last.name);
|
||||
last.name = nameIndex;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the source map as object.
|
||||
*/
|
||||
toMap(file?: string): MappingsMap {
|
||||
return {
|
||||
version: 3,
|
||||
file,
|
||||
sources: this.sources.slice(),
|
||||
sourcesContent: this.sourcesContent.slice(),
|
||||
names: this.names.items(),
|
||||
mappings: this.builder.toString(),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the source map as string.
|
||||
*
|
||||
* This is ~2.5x faster than calling `JSON.stringify(generator.toMap())`
|
||||
*/
|
||||
toString(file?: string): string {
|
||||
return ('{' +
|
||||
'"version":3,' +
|
||||
(file ? `"file":${JSON.stringify(file)},` : '') +
|
||||
`"sources":${JSON.stringify(this.sources)},` +
|
||||
`"sourcesContent":${JSON.stringify(this.sourcesContent)},` +
|
||||
`"names":${JSON.stringify(this.names.items())},` +
|
||||
`"mappings":"${this.builder.toString()}"` +
|
||||
'}');
|
||||
}
|
||||
}
|
||||
|
||||
class IndexedSet {
|
||||
map: Map<string, number>;
|
||||
nextIndex: number;
|
||||
|
||||
constructor() {
|
||||
this.map = new Map();
|
||||
this.nextIndex = 0;
|
||||
}
|
||||
|
||||
indexFor(x: string) {
|
||||
let index = this.map.get(x);
|
||||
if (index == null) {
|
||||
index = this.nextIndex++;
|
||||
this.map.set(x, index);
|
||||
}
|
||||
return index;
|
||||
}
|
||||
|
||||
items() {
|
||||
return Array.from(this.map.keys());
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Generator;
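A minimal sketch of driving the Generator above (illustration only; the `require` path is hypothetical, and the expected mappings string is taken from the Generator tests later in this diff):

const Generator = require('./Generator'); // hypothetical path, for illustration
const generator = new Generator();
generator.startFile('apples', 'pears');  // register a source file and its content
generator.addSimpleMapping(12, 87);      // generated line 12, column 87, no original source
console.log(generator.toMap().mappings); // ';;;;;;;;;;;uF' -- 11 line markers, then VLQ(87)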
|
|
@ -1,126 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2017-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest.disableAutomock();
|
||||
|
||||
const B64Builder = require('../B64Builder');
|
||||
|
||||
let builder;
|
||||
beforeEach(() => {
|
||||
builder = new B64Builder();
|
||||
});
|
||||
|
||||
it('exposes a fluent interface', () => {
|
||||
expect(builder.markLines(0)).toBe(builder);
|
||||
expect(builder.markLines(3)).toBe(builder);
|
||||
expect(builder.startSegment()).toBe(builder);
|
||||
expect(builder.append(4)).toBe(builder);
|
||||
});
|
||||
|
||||
it('can create an empty string', () => {
|
||||
expect(builder.toString()).toEqual('');
|
||||
});
|
||||
|
||||
it('can mark a new line in the generated code', () => {
|
||||
builder.markLines(1);
|
||||
expect(builder.toString()).toEqual(';');
|
||||
});
|
||||
|
||||
it('can mark multiple new lines in the generated code', () => {
|
||||
builder.markLines(4);
|
||||
expect(builder.toString()).toEqual(';;;;');
|
||||
});
|
||||
|
||||
it('can mark zero new lines in the generated code', () => {
|
||||
builder.markLines(0);
|
||||
expect(builder.toString()).toEqual('');
|
||||
});
|
||||
|
||||
it('does not add commas when just starting a segment', () => {
|
||||
builder.startSegment(0);
|
||||
expect(builder.toString()).toEqual('A');
|
||||
});
|
||||
|
||||
it('adds a comma when starting a segment after another segment', () => {
|
||||
builder.startSegment(0);
|
||||
builder.startSegment(1);
|
||||
expect(builder.toString()).toEqual('A,C');
|
||||
});
|
||||
|
||||
it('does not add a comma when starting a segment after marking a line', () => {
|
||||
builder.startSegment(0);
|
||||
builder.markLines(1);
|
||||
builder.startSegment(0);
|
||||
expect(builder.toString()).toEqual('A;A');
|
||||
});
|
||||
|
||||
it('adds a comma when starting a segment after calling `markLines(0)`', () => {
|
||||
builder.startSegment(0);
|
||||
builder.markLines(0);
|
||||
builder.startSegment(1);
|
||||
expect(builder.toString()).toEqual('A,C');
|
||||
});
|
||||
|
||||
it('can append values that fit within 5 bits (including sign bit)', () => {
|
||||
builder.append(0b1111);
|
||||
builder.append(-0b1111);
|
||||
expect(builder.toString()).toEqual('ef');
|
||||
});
|
||||
|
||||
it('can append values that fit within 10 bits (including sign bit)', () => {
|
||||
builder.append(0b111100110);
|
||||
builder.append(-0b110110011);
|
||||
expect(builder.toString()).toEqual('senb');
|
||||
});
|
||||
|
||||
it('can append values that fit within 15 bits (including sign bit)', () => {
|
||||
builder.append(0b10011111011001);
|
||||
builder.append(-0b11001010001001);
|
||||
expect(builder.toString()).toEqual('y9TzoZ');
|
||||
});
|
||||
|
||||
it('can append values that fit within 20 bits (including sign bit)', () => {
|
||||
builder.append(0b1110010011101110110);
|
||||
builder.append(-0b1011000010100100110);
|
||||
expect(builder.toString()).toEqual('s3zctyiW');
|
||||
});
|
||||
|
||||
it('can append values that fit within 25 bits (including sign bit)', () => {
|
||||
builder.append(0b100010001111011010110111);
|
||||
builder.append(-0b100100111100001110101111);
|
||||
expect(builder.toString()).toEqual('ur7jR/6hvS');
|
||||
});
|
||||
|
||||
it('can append values that fit within 30 bits (including sign bit)', () => {
|
||||
builder.append(0b10001100100001101010001011111);
|
||||
builder.append(-0b11111000011000111110011111101);
|
||||
expect(builder.toString()).toEqual('+lqjyR7v+xhf');
|
||||
});
|
||||
|
||||
it('can append values that fit within 32 bits (including sign bit)', () => {
|
||||
builder.append(0b1001100101000101001011111110011);
|
||||
builder.append(-0b1101101101011000110011001110000);
|
||||
expect(builder.toString()).toEqual('m/rq0sChnzx1tD');
|
||||
});
|
||||
|
||||
it('can handle multiple operations', () => {
|
||||
builder
|
||||
.markLines(3)
|
||||
.startSegment(4)
|
||||
.append(2)
|
||||
.append(2)
|
||||
.append(0)
|
||||
.append(2345)
|
||||
.startSegment(12)
|
||||
.append(987543)
|
||||
.markLines(1)
|
||||
.startSegment(0);
|
||||
expect(builder.toString()).toEqual(';;;IEEAyyE,Yu5o8B;A');
|
||||
});
|
|
@ -1,113 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2017-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest.disableAutomock();
|
||||
|
||||
const Generator = require('../Generator');
|
||||
|
||||
const {objectContaining} = expect;
|
||||
|
||||
let generator;
|
||||
beforeEach(() => {
|
||||
generator = new Generator();
|
||||
});
|
||||
|
||||
it('adds file name and source code when starting a file', () => {
|
||||
const file1 = 'just/a/file';
|
||||
const file2 = 'another/file';
|
||||
const source1 = 'var a = 1;';
|
||||
const source2 = 'var a = 2;';
|
||||
|
||||
generator.startFile(file1, source1);
|
||||
generator.startFile(file2, source2);
|
||||
|
||||
expect(generator.toMap())
|
||||
.toEqual(objectContaining({
|
||||
sources: [file1, file2],
|
||||
sourcesContent: [source1, source2],
|
||||
}));
|
||||
});
|
||||
|
||||
it('throws when adding a mapping without starting a file', () => {
|
||||
expect(() => generator.addSimpleMapping(1, 2)).toThrow();
|
||||
});
|
||||
|
||||
it('throws when adding a mapping after ending a file', () => {
|
||||
generator.startFile('apples', 'pears');
|
||||
generator.endFile();
|
||||
expect(() => generator.addSimpleMapping(1, 2)).toThrow();
|
||||
});
|
||||
|
||||
it('can add a mapping for generated code without corresponding original source', () => {
|
||||
generator.startFile('apples', 'pears');
|
||||
generator.addSimpleMapping(12, 87);
|
||||
expect(generator.toMap())
|
||||
.toEqual(objectContaining({
|
||||
mappings: ';;;;;;;;;;;uF',
|
||||
}));
|
||||
});
|
||||
|
||||
it('can add a mapping with corresponding location in the original source', () => {
|
||||
generator.startFile('apples', 'pears');
|
||||
generator.addSourceMapping(2, 3, 456, 7);
|
||||
expect(generator.toMap())
|
||||
.toEqual(objectContaining({
|
||||
mappings: ';GAucO',
|
||||
}));
|
||||
});
|
||||
|
||||
it('can add a mapping with source location and symbol name', () => {
|
||||
generator.startFile('apples', 'pears');
|
||||
generator.addNamedSourceMapping(9, 876, 54, 3, 'arbitrary');
|
||||
expect(generator.toMap())
|
||||
.toEqual(objectContaining({
|
||||
mappings: ';;;;;;;;42BAqDGA',
|
||||
names: ['arbitrary'],
|
||||
}));
|
||||
});
|
||||
|
||||
describe('full map generation', () => {
|
||||
beforeEach(() => {
|
||||
generator.startFile('apples', 'pears');
|
||||
generator.addSimpleMapping(1, 2);
|
||||
generator.addNamedSourceMapping(3, 4, 5, 6, 'plums');
|
||||
generator.endFile();
|
||||
generator.startFile('lemons', 'oranges');
|
||||
generator.addNamedSourceMapping(7, 8, 9, 10, 'tangerines');
|
||||
generator.addNamedSourceMapping(11, 12, 13, 14, 'tangerines');
|
||||
generator.addSimpleMapping(15, 16);
|
||||
});
|
||||
|
||||
it('can add multiple mappings for each file', () => {
|
||||
expect(generator.toMap()).toEqual({
|
||||
version: 3,
|
||||
mappings: 'E;;IAIMA;;;;QCIIC;;;;YAIIA;;;;gB',
|
||||
sources: ['apples', 'lemons'],
|
||||
sourcesContent: ['pears', 'oranges'],
|
||||
names: ['plums', 'tangerines'],
|
||||
});
|
||||
});
|
||||
|
||||
it('can add a `file` property to the map', () => {
|
||||
expect(generator.toMap('arbitrary'))
|
||||
.toEqual(objectContaining({
|
||||
file: 'arbitrary',
|
||||
}));
|
||||
});
|
||||
|
||||
it('supports direct JSON serialization', () => {
|
||||
expect(JSON.parse(generator.toString())).toEqual(generator.toMap());
|
||||
});
|
||||
|
||||
it('supports direct JSON serialization with a file name', () => {
|
||||
const file = 'arbitrary/file';
|
||||
expect(JSON.parse(generator.toString(file))).toEqual(generator.toMap(file));
|
||||
});
|
||||
});
|
|
@ -1,85 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2017-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest.disableAutomock();
|
||||
|
||||
const Generator = require('../Generator');
|
||||
const {compactMapping, fromRawMappings} = require('..');
|
||||
|
||||
describe('flattening mappings / compacting', () => {
|
||||
it('flattens simple mappings', () => {
|
||||
expect(compactMapping({generated: {line: 12, column: 34}}))
|
||||
.toEqual([12, 34]);
|
||||
});
|
||||
|
||||
it('flattens mappings with a source location', () => {
|
||||
expect(compactMapping({
|
||||
generated: {column: 34, line: 12},
|
||||
original: {column: 78, line: 56},
|
||||
})).toEqual([12, 34, 56, 78]);
|
||||
});
|
||||
|
||||
it('flattens mappings with a source location and a symbol name', () => {
|
||||
expect(compactMapping({
|
||||
generated: {column: 34, line: 12},
|
||||
name: 'arbitrary',
|
||||
original: {column: 78, line: 56},
|
||||
})).toEqual([12, 34, 56, 78, 'arbitrary']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('build map from raw mappings', () => {
|
||||
it('returns a `Generator` instance', () => {
|
||||
expect(fromRawMappings([])).toBeInstanceOf(Generator);
|
||||
});
|
||||
|
||||
it('returns a working source map containing all mappings', () => {
|
||||
const input = [{
|
||||
code: lines(11),
|
||||
map: [
|
||||
[1, 2],
|
||||
[3, 4, 5, 6, 'apples'],
|
||||
[7, 8, 9, 10],
|
||||
[11, 12, 13, 14, 'pears'],
|
||||
],
|
||||
sourceCode: 'code1',
|
||||
sourcePath: 'path1',
|
||||
}, {
|
||||
code: lines(3),
|
||||
map: [
|
||||
[1, 2],
|
||||
[3, 4, 15, 16, 'bananas'],
|
||||
],
|
||||
sourceCode: 'code2',
|
||||
sourcePath: 'path2',
|
||||
}, {
|
||||
code: lines(23),
|
||||
map: [
|
||||
[11, 12],
|
||||
[13, 14, 15, 16, 'bananas'],
|
||||
[17, 18, 19, 110],
|
||||
[21, 112, 113, 114, 'pears'],
|
||||
],
|
||||
sourceCode: 'code3',
|
||||
sourcePath: 'path3',
|
||||
}];
|
||||
|
||||
expect(fromRawMappings(input).toMap())
|
||||
.toEqual({
|
||||
mappings: 'E;;IAIMA;;;;QAII;;;;YAIIC;E;;ICEEC;;;;;;;;;;;Y;;cCAAA;;;;kBAI8F;;;;gHA8FID',
|
||||
names: ['apples', 'pears', 'bananas'],
|
||||
sources: ['path1', 'path2', 'path3'],
|
||||
sourcesContent: ['code1', 'code2', 'code3'],
|
||||
version: 3,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
const lines = n => Array(n).join('\n');
|
|
@ -1,127 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
/**
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*
|
||||
* Based on the Base 64 VLQ implementation in Closure Compiler:
|
||||
* https://git.io/vymuA
|
||||
*
|
||||
* Copyright 2011 The Closure Compiler Authors. All rights reserved.
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following
|
||||
* disclaimer in the documentation and/or other materials provided
|
||||
* with the distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived
|
||||
* from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
* @copyright
|
||||
*/
|
||||
|
||||
/* eslint-disable no-bitwise */
|
||||
|
||||
'use strict';
|
||||
|
||||
// A map of values to characters for the b64 encoding
|
||||
const CHAR_MAP = [
|
||||
0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
|
||||
0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f, 0x50,
|
||||
0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
|
||||
0x59, 0x5a, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
|
||||
0x67, 0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e,
|
||||
0x6f, 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76,
|
||||
0x77, 0x78, 0x79, 0x7a, 0x30, 0x31, 0x32, 0x33,
|
||||
0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x2b, 0x2f,
|
||||
];
|
||||
|
||||
// A single base 64 digit can contain 6 bits of data. For the base 64 variable
|
||||
// length quantities we use in the source map spec, the first bit is the sign,
|
||||
// the next four bits are the actual value, and the 6th bit is the
|
||||
// continuation bit. The continuation bit tells us whether there are more
|
||||
// digits in this value following this digit.
|
||||
//
|
||||
// Continuation
|
||||
// | Sign
|
||||
// | |
|
||||
// V V
|
||||
// 101011
|
||||
|
||||
const VLQ_BASE_SHIFT = 5;
|
||||
|
||||
// binary: 100000
|
||||
const VLQ_BASE = 1 << VLQ_BASE_SHIFT;
|
||||
|
||||
// binary: 011111
|
||||
const VLQ_BASE_MASK = VLQ_BASE - 1;
|
||||
|
||||
// binary: 100000
|
||||
const VLQ_CONTINUATION_BIT = VLQ_BASE;
|
||||
|
||||
/**
|
||||
* Converts from a two-complement value to a value where the sign bit is
|
||||
* placed in the least significant bit. For example, as decimals:
|
||||
* 1 becomes 2 (10 binary), -1 becomes 3 (11 binary)
|
||||
* 2 becomes 4 (100 binary), -2 becomes 5 (101 binary)
|
||||
*/
|
||||
function toVLQSigned(value) {
|
||||
return value < 0
|
||||
? ((-value) << 1) + 1
|
||||
: (value << 1) + 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encodes a number to base64 VLQ format and appends it to the passed-in buffer
|
||||
*
|
||||
* DON'T USE COMPOUND OPERATORS (eg `>>>=`) ON `let`-DECLARED VARIABLES!
|
||||
* V8 WILL DEOPTIMIZE THIS FUNCTION AND MAP CREATION WILL BE 25% SLOWER!
|
||||
*
|
||||
* DON'T ADD MORE COMMENTS TO THIS FUNCTION TO KEEP ITS LENGTH SHORT ENOUGH FOR
|
||||
* V8 OPTIMIZATION!
|
||||
*/
|
||||
function encode(value: number, buffer: Buffer, position: number): number {
|
||||
let vlq = toVLQSigned(value);
|
||||
let digit;
|
||||
do {
|
||||
digit = vlq & VLQ_BASE_MASK;
|
||||
vlq = vlq >>> VLQ_BASE_SHIFT;
|
||||
if (vlq > 0) {
|
||||
// There are still more digits in this value, so we must make sure the
|
||||
// continuation bit is marked.
|
||||
digit |= VLQ_CONTINUATION_BIT;
|
||||
}
|
||||
buffer[position++] = CHAR_MAP[digit];
|
||||
} while (vlq > 0);
|
||||
|
||||
return position;
|
||||
}
|
||||
|
||||
module.exports = encode;
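For reference, a brief usage sketch of the encoder above (illustrative only: the require path is hypothetical since this file is being deleted, and the expected characters follow from CHAR_MAP and the VLQ rules described in the comments):

const encode = require('./encode'); // hypothetical path to the module above

const buffer = Buffer.alloc(8); // Node Buffer that receives the base64 characters
let pos = 0;
pos = encode(1, buffer, pos);   // VLQ-signed 2, single digit -> 'C'
pos = encode(-1, buffer, pos);  // VLQ-signed 3, single digit -> 'D'
pos = encode(16, buffer, pos);  // VLQ-signed 32, continuation digit + 1 -> 'gB'
buffer.toString('ascii', 0, pos); // 'CDgB'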
|
|
@ -1 +0,0 @@
|
|||
{"main": "source-map.js"}
|
|
@ -1,104 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const Generator = require('./Generator');
|
||||
|
||||
import type ModuleTransport from '../../lib/ModuleTransport';
|
||||
import type {RawMapping as BabelRawMapping} from 'babel-generator';
|
||||
|
||||
type GeneratedCodeMapping = [number, number];
|
||||
type SourceMapping = [number, number, number, number];
|
||||
type SourceMappingWithName = [number, number, number, number, string];
|
||||
|
||||
export type RawMapping =
|
||||
SourceMappingWithName | SourceMapping | GeneratedCodeMapping;
|
||||
|
||||
/**
|
||||
* Creates a source map from modules with "raw mappings", i.e. an array of
|
||||
* tuples with either 2, 4, or 5 elements:
|
||||
* generated line, generated column, source line, source column, symbol name.
|
||||
*/
|
||||
function fromRawMappings(modules: Array<ModuleTransport>): Generator {
|
||||
const generator = new Generator();
|
||||
let carryOver = 0;
|
||||
|
||||
for (var j = 0, o = modules.length; j < o; ++j) {
|
||||
var module = modules[j];
|
||||
var {code, map} = module;
|
||||
|
||||
if (Array.isArray(map)) {
|
||||
addMappingsForFile(generator, map, module, carryOver);
|
||||
} else if (map != null) {
|
||||
throw new Error(
|
||||
`Unexpected module with full source map found: ${module.sourcePath}`
|
||||
);
|
||||
}
|
||||
|
||||
carryOver += countLines(code);
|
||||
}
|
||||
|
||||
return generator;
|
||||
}
|
||||
|
||||
function compactMapping(mapping: BabelRawMapping): RawMapping {
|
||||
const {column, line} = mapping.generated;
|
||||
const {name, original} = mapping;
|
||||
|
||||
if (original == null) {
|
||||
return [line, column];
|
||||
}
|
||||
|
||||
if (typeof name !== 'string') {
|
||||
return [line, column, original.line, original.column];
|
||||
}
|
||||
|
||||
return [line, column, original.line, original.column, name];
|
||||
}
|
||||
|
||||
function addMappingsForFile(generator, mappings, module, carryOver) {
|
||||
generator.startFile(module.sourcePath, module.sourceCode);
|
||||
|
||||
const columnOffset = module.code.indexOf('{') + 1;
|
||||
for (let i = 0, n = mappings.length; i < n; ++i) {
|
||||
addMapping(generator, mappings[i], carryOver, columnOffset);
|
||||
}
|
||||
|
||||
generator.endFile();
|
||||
|
||||
}
|
||||
|
||||
function addMapping(generator, mapping, carryOver, columnOffset) {
|
||||
const n = mapping.length;
|
||||
const line = mapping[0] + carryOver;
|
||||
// lines start at 1, columns start at 0
|
||||
const column = mapping[0] === 1 ? mapping[1] + columnOffset : mapping[1];
|
||||
if (n === 2) {
|
||||
generator.addSimpleMapping(line, column);
|
||||
} else if (n === 4) {
|
||||
// $FlowIssue #15579526
|
||||
generator.addSourceMapping(line, column, mapping[2], mapping[3]);
|
||||
} else if (n === 5) {
|
||||
generator.addNamedSourceMapping(
|
||||
// $FlowIssue #15579526
|
||||
line, column, mapping[2], mapping[3], mapping[4]);
|
||||
} else {
|
||||
throw new Error(`Invalid mapping: [${mapping.join(', ')}]`);
|
||||
}
|
||||
}
|
||||
|
||||
function countLines(string) {
|
||||
return string.split('\n').length;
|
||||
}
|
||||
|
||||
exports.fromRawMappings = fromRawMappings;
|
||||
exports.compactMapping = compactMapping;
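A short usage sketch of the two exports above (illustrative only; the require path and the module data are made up):

const {compactMapping, fromRawMappings} = require('./source-map'); // hypothetical path

// A Babel raw mapping with an original location and a symbol name flattens to a 5-tuple:
compactMapping({
  generated: {line: 1, column: 0},
  original: {line: 1, column: 4},
  name: 'foo',
}); // -> [1, 0, 1, 4, 'foo']

// Module transports carrying such tuples can then be merged into a single source map:
const map = fromRawMappings([{
  code: 'var foo;',
  map: [[1, 0, 1, 4, 'foo']],
  sourceCode: 'var foo;',
  sourcePath: 'foo.js',
}]).toMap('bundle.js'); // {version: 3, file: 'bundle.js', sources: ['foo.js'], ...}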
|
|
@ -1,157 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const babel = require('babel-core');
|
||||
const babelGenerate = require('babel-generator').default;
|
||||
const babylon = require('babylon');
|
||||
|
||||
import type {AssetDescriptor} from '.';
|
||||
import type {ModuleTransportLike} from '../shared/types.flow';
|
||||
|
||||
type SubTree<T: ModuleTransportLike> = (
|
||||
moduleTransport: T,
|
||||
moduleTransportsByPath: Map<string, T>,
|
||||
) => Generator<number, void, void>;
|
||||
|
||||
const assetPropertyBlacklist = new Set([
|
||||
'files',
|
||||
'fileSystemLocation',
|
||||
'path',
|
||||
]);
|
||||
|
||||
const ASSET_REGISTRY_PATH = 'react-native/Libraries/Image/AssetRegistry';
|
||||
|
||||
function generateAssetCodeFileAst(assetDescriptor: AssetDescriptor): Object {
|
||||
const properDescriptor = filterObject(assetDescriptor, assetPropertyBlacklist);
|
||||
const descriptorAst = babylon.parseExpression(JSON.stringify(properDescriptor));
|
||||
const t = babel.types;
|
||||
const moduleExports = t.memberExpression(t.identifier('module'), t.identifier('exports'));
|
||||
const requireCall =
|
||||
t.callExpression(t.identifier('require'), [t.stringLiteral(ASSET_REGISTRY_PATH)]);
|
||||
const registerAssetFunction = t.memberExpression(requireCall, t.identifier('registerAsset'));
|
||||
const registerAssetCall = t.callExpression(registerAssetFunction, [descriptorAst]);
|
||||
return t.file(t.program([
|
||||
t.expressionStatement(t.assignmentExpression('=', moduleExports, registerAssetCall)),
|
||||
]));
|
||||
}
|
||||
|
||||
function generateAssetTransformResult(assetDescriptor: AssetDescriptor): {|
|
||||
code: string,
|
||||
dependencies: Array<string>,
|
||||
dependencyOffsets: Array<number>,
|
||||
|} {
|
||||
const {code} = babelGenerate(
|
||||
generateAssetCodeFileAst(assetDescriptor),
|
||||
{comments: false, compact: true},
|
||||
);
|
||||
const dependencies = [ASSET_REGISTRY_PATH];
|
||||
const dependencyOffsets = [code.indexOf(ASSET_REGISTRY_PATH) - 1];
|
||||
return {code, dependencies, dependencyOffsets};
|
||||
}
|
||||
|
||||
// Test extension against all types supported by image-size module.
|
||||
// If it's not one of these, we won't treat it as an image.
|
||||
function isAssetTypeAnImage(type: string): boolean {
|
||||
return [
|
||||
'png', 'jpg', 'jpeg', 'bmp', 'gif', 'webp', 'psd', 'svg', 'tiff',
|
||||
].indexOf(type) !== -1;
|
||||
}
|
||||
|
||||
function filterObject(object, blacklist) {
|
||||
const copied = Object.assign({}, object);
|
||||
for (const key of blacklist) {
|
||||
delete copied[key];
|
||||
}
|
||||
return copied;
|
||||
}
|
||||
|
||||
function createRamBundleGroups<T: ModuleTransportLike>(
|
||||
ramGroups: $ReadOnlyArray<string>,
|
||||
groupableModules: $ReadOnlyArray<T>,
|
||||
subtree: SubTree<T>,
|
||||
): Map<number, Set<number>> {
|
||||
// build two maps that allow looking up module data
|
||||
// by path or (numeric) module id;
|
||||
const byPath = new Map();
|
||||
const byId = new Map();
|
||||
groupableModules.forEach(m => {
|
||||
byPath.set(m.sourcePath, m);
|
||||
byId.set(m.id, m.sourcePath);
|
||||
});
|
||||
|
||||
// build a map of group root IDs to an array of module IDs in the group
|
||||
const result: Map<number, Set<number>> = new Map(
|
||||
ramGroups
|
||||
.map(modulePath => {
|
||||
const root = byPath.get(modulePath);
|
||||
if (root == null) {
|
||||
throw Error(`Group root ${modulePath} is not part of the bundle`);
|
||||
}
|
||||
return [
|
||||
root.id,
|
||||
// `subtree` yields the IDs of all transitive dependencies of a module
|
||||
new Set(subtree(root, byPath)),
|
||||
];
|
||||
})
|
||||
);
|
||||
|
||||
if (ramGroups.length > 1) {
|
||||
// build a map of all grouped module IDs to an array of group root IDs
|
||||
const all = new ArrayMap();
|
||||
for (const [parent, children] of result) {
|
||||
for (const module of children) {
|
||||
all.get(module).push(parent);
|
||||
}
|
||||
}
|
||||
|
||||
// find all module IDs that are part of more than one group
|
||||
const doubles = filter(all, ([, parents]) => parents.length > 1);
|
||||
for (const [moduleId, parents] of doubles) {
|
||||
const parentNames = parents.map(byId.get, byId);
|
||||
const lastName = parentNames.pop();
|
||||
throw new Error(
|
||||
`Module ${byId.get(moduleId) || moduleId} belongs to groups ${
|
||||
parentNames.join(', ')}, and ${String(lastName)
|
||||
}. Ensure that each module is only part of one group.`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function * filter(iterator, predicate) {
|
||||
for (const value of iterator) {
|
||||
if (predicate(value)) {
|
||||
yield value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class ArrayMap extends Map {
|
||||
get(key) {
|
||||
let array = super.get(key);
|
||||
if (!array) {
|
||||
array = [];
|
||||
this.set(key, array);
|
||||
}
|
||||
return array;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
createRamBundleGroups,
|
||||
generateAssetCodeFileAst,
|
||||
generateAssetTransformResult,
|
||||
isAssetTypeAnImage,
|
||||
};
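As a rough illustration of what the asset helpers above produce (the descriptor fields and the require path are invented for the example; the real AssetDescriptor shape lives elsewhere in the bundler):

const {generateAssetTransformResult} = require('./util'); // hypothetical path

const {code, dependencies, dependencyOffsets} = generateAssetTransformResult({
  __packager_asset: true, // example fields only
  httpServerLocation: '/assets/img',
  name: 'logo',
  scales: [1],
  type: 'png',
});
// code is compact JS of the form:
//   module.exports=require("react-native/Libraries/Image/AssetRegistry").registerAsset({...});
// dependencies       -> ['react-native/Libraries/Image/AssetRegistry']
// dependencyOffsets  -> [offset of the module name inside the require() call]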
|
|
@ -1,13 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
// Bug with Jest because we're going to the node_modules that is a sibling
|
||||
// of what jest thinks our root (the dir with the package.json) should be.
|
||||
module.exports = require.requireActual('lodash');
|
|
@ -1,13 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
module.exports = function(data, callback) {
|
||||
callback(null, {});
|
||||
};
|
|
@ -1,95 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest
|
||||
.unmock('stream')
|
||||
.unmock('crypto')
|
||||
.unmock('../../lib/ModuleTransport')
|
||||
.unmock('../');
|
||||
|
||||
const fs = {writeFileSync: jest.fn()};
|
||||
const temp = {path: () => '/arbitrary/path'};
|
||||
const workerFarm = jest.fn();
|
||||
jest.setMock('fs', fs);
|
||||
jest.setMock('temp', temp);
|
||||
jest.setMock('worker-farm', workerFarm);
|
||||
jest.setMock('../../worker-farm', workerFarm);
|
||||
|
||||
var Transformer = require('../');
|
||||
|
||||
const {any} = jasmine;
|
||||
const {Readable} = require('stream');
|
||||
|
||||
describe('Transformer', function() {
|
||||
let workers, Cache;
|
||||
const fileName = '/an/arbitrary/file.js';
|
||||
const localPath = 'arbitrary/file.js';
|
||||
const transformModulePath = __filename;
|
||||
|
||||
beforeEach(function() {
|
||||
Cache = jest.fn();
|
||||
Cache.prototype.get = jest.fn((a, b, c) => c());
|
||||
|
||||
fs.writeFileSync.mockClear();
|
||||
workerFarm.mockClear();
|
||||
workerFarm.mockImplementation((opts, path, methods) => {
|
||||
const api = workers = {};
|
||||
methods.forEach(method => {api[method] = jest.fn();});
|
||||
return {methods: api, stdout: new Readable({read() {}}), stderr: new Readable({read() {}})};
|
||||
});
|
||||
});
|
||||
|
||||
it('passes transform module path, file path, source code' +
|
||||
' to the worker farm when transforming', () => {
|
||||
const transformOptions = {arbitrary: 'options'};
|
||||
const code = 'arbitrary(code)';
|
||||
new Transformer(transformModulePath).transformFile(fileName, localPath, code, transformOptions);
|
||||
expect(workers.transformAndExtractDependencies).toBeCalledWith(
|
||||
transformModulePath,
|
||||
fileName,
|
||||
localPath,
|
||||
code,
|
||||
transformOptions,
|
||||
any(Function),
|
||||
);
|
||||
});
|
||||
|
||||
it('should add file info to parse errors', function() {
|
||||
const transformer = new Transformer(transformModulePath);
|
||||
var message = 'message';
|
||||
var snippet = 'snippet';
|
||||
|
||||
workers.transformAndExtractDependencies.mockImplementation(
|
||||
function(transformPath, filename, localPth, code, opts, callback) {
|
||||
var babelError = new SyntaxError(message);
|
||||
babelError.type = 'SyntaxError';
|
||||
babelError.description = message;
|
||||
babelError.loc = {
|
||||
line: 2,
|
||||
column: 15,
|
||||
};
|
||||
babelError.codeFrame = snippet;
|
||||
callback(babelError);
|
||||
},
|
||||
);
|
||||
|
||||
expect.assertions(7);
|
||||
return transformer.transformFile(fileName, localPath, '', {})
|
||||
.catch(function(error) {
|
||||
expect(error.type).toEqual('TransformError');
|
||||
expect(error.message).toBe('SyntaxError ' + message);
|
||||
expect(error.lineNumber).toBe(2);
|
||||
expect(error.column).toBe(15);
|
||||
expect(error.filename).toBe(fileName);
|
||||
expect(error.description).toBe(message);
|
||||
expect(error.snippet).toBe(snippet);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,204 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const Logger = require('../Logger');
|
||||
|
||||
const debug = require('debug')('RNP:JStransformer');
|
||||
const denodeify: Denodeify = require('denodeify');
|
||||
const invariant = require('fbjs/lib/invariant');
|
||||
const path = require('path');
|
||||
const util = require('util');
|
||||
const workerFarm = require('../worker-farm');
|
||||
|
||||
import type {Data as TransformData, Options as WorkerOptions} from './worker';
|
||||
import type {LocalPath} from '../node-haste/lib/toLocalPath';
|
||||
import type {MappingsMap} from '../lib/SourceMap';
|
||||
import typeof {minify as Minify, transformAndExtractDependencies as TransformAndExtractDependencies} from './worker';
|
||||
|
||||
type CB<T> = (?Error, ?T) => mixed;
|
||||
type Denodeify =
|
||||
& (<A, B, C, T>((A, B, C, CB<T>) => void) => (A, B, C) => Promise<T>)
|
||||
& (<A, B, C, D, E, T>((A, B, C, D, E, CB<T>) => void) => (A, B, C, D, E) => Promise<T>);
|
||||
|
||||
// Avoid memory leaks caused in workers. This number seems to be a good enough number
|
||||
// to avoid any memory leak while not slowing down initial builds.
|
||||
// TODO(amasad): Once we get bundle splitting, we can drive this down a bit more.
|
||||
const MAX_CALLS_PER_WORKER = 600;
|
||||
|
||||
// The worker will time out if one of the callers times out.
|
||||
const TRANSFORM_TIMEOUT_INTERVAL = 301000;
|
||||
|
||||
// How many times we can tolerate failures from the worker.
|
||||
const MAX_RETRIES = 2;
|
||||
|
||||
function makeFarm(worker, methods, timeout, maxConcurrentWorkers) {
|
||||
return workerFarm(
|
||||
{
|
||||
autoStart: true,
|
||||
execArgv: [],
|
||||
maxConcurrentCallsPerWorker: 1,
|
||||
maxConcurrentWorkers,
|
||||
maxCallsPerWorker: MAX_CALLS_PER_WORKER,
|
||||
maxCallTime: timeout,
|
||||
maxRetries: MAX_RETRIES,
|
||||
},
|
||||
worker,
|
||||
methods,
|
||||
);
|
||||
}
|
||||
|
||||
type Reporters = {
|
||||
+stdoutChunk: (chunk: string) => mixed,
|
||||
+stderrChunk: (chunk: string) => mixed,
|
||||
};
|
||||
|
||||
class Transformer {
|
||||
|
||||
_workers: {[name: string]: Function};
|
||||
_transformModulePath: string;
|
||||
_transform: (
|
||||
transform: string,
|
||||
filename: string,
|
||||
localPath: LocalPath,
|
||||
sourceCode: string,
|
||||
options: WorkerOptions,
|
||||
) => Promise<TransformData>;
|
||||
minify: (
|
||||
filename: string,
|
||||
code: string,
|
||||
sourceMap: MappingsMap,
|
||||
) => Promise<{code: string, map: MappingsMap}>;
|
||||
|
||||
constructor(
|
||||
transformModulePath: string,
|
||||
maxWorkerCount: number,
|
||||
reporters: Reporters,
|
||||
workerPath: ?string,
|
||||
) {
|
||||
invariant(path.isAbsolute(transformModulePath), 'transform module path should be absolute');
|
||||
this._transformModulePath = transformModulePath;
|
||||
|
||||
const farm = makeFarm(
|
||||
workerPath || require.resolve('./worker'),
|
||||
['minify', 'transformAndExtractDependencies'],
|
||||
TRANSFORM_TIMEOUT_INTERVAL,
|
||||
maxWorkerCount,
|
||||
);
|
||||
farm.stdout.on('data', chunk => {
|
||||
reporters.stdoutChunk(chunk.toString('utf8'));
|
||||
});
|
||||
farm.stderr.on('data', chunk => {
|
||||
reporters.stderrChunk(chunk.toString('utf8'));
|
||||
});
|
||||
|
||||
this._workers = farm.methods;
|
||||
this._transform = denodeify((this._workers.transformAndExtractDependencies: TransformAndExtractDependencies));
|
||||
this.minify = denodeify((this._workers.minify: Minify));
|
||||
}
|
||||
|
||||
kill() {
|
||||
this._workers && workerFarm.end(this._workers);
|
||||
}
|
||||
|
||||
transformFile(
|
||||
fileName: string,
|
||||
localPath: LocalPath,
|
||||
code: string,
|
||||
options: WorkerOptions) {
|
||||
if (!this._transform) {
|
||||
return Promise.reject(new Error('No transform module'));
|
||||
}
|
||||
debug('transforming file', fileName);
|
||||
return this
|
||||
._transform(
|
||||
this._transformModulePath,
|
||||
fileName,
|
||||
localPath,
|
||||
code,
|
||||
options,
|
||||
)
|
||||
.then(data => {
|
||||
Logger.log(data.transformFileStartLogEntry);
|
||||
Logger.log(data.transformFileEndLogEntry);
|
||||
debug('done transforming file', fileName);
|
||||
return data.result;
|
||||
})
|
||||
.catch(error => {
|
||||
if (error.type === 'TimeoutError') {
|
||||
const timeoutErr = new Error(
|
||||
`TimeoutError: transforming ${fileName} took longer than ` +
|
||||
`${TRANSFORM_TIMEOUT_INTERVAL / 1000} seconds.\n` +
|
||||
'You can adjust timeout via the \'transformTimeoutInterval\' option'
|
||||
);
|
||||
/* $FlowFixMe: monkey-patch Error */
|
||||
timeoutErr.type = 'TimeoutError';
|
||||
throw timeoutErr;
|
||||
} else if (error.type === 'ProcessTerminatedError') {
|
||||
const uncaughtError = new Error(
|
||||
'Uncaught error in the transformer worker: ' +
|
||||
this._transformModulePath
|
||||
);
|
||||
/* $FlowFixMe: monkey-patch Error */
|
||||
uncaughtError.type = 'ProcessTerminatedError';
|
||||
throw uncaughtError;
|
||||
}
|
||||
|
||||
throw formatError(error, fileName);
|
||||
});
|
||||
}
|
||||
|
||||
static TransformError;
|
||||
}
|
||||
|
||||
Transformer.TransformError = TransformError;
|
||||
|
||||
function TransformError() {
|
||||
Error.captureStackTrace && Error.captureStackTrace(this, TransformError);
|
||||
}
|
||||
util.inherits(TransformError, SyntaxError);
|
||||
|
||||
function formatError(err, filename) {
|
||||
if (err.loc) {
|
||||
return formatBabelError(err, filename);
|
||||
} else {
|
||||
return formatGenericError(err, filename);
|
||||
}
|
||||
}
|
||||
|
||||
function formatGenericError(err, filename) {
|
||||
var msg = 'TransformError: ' + filename + ': ' + err.message;
|
||||
var error = new TransformError();
|
||||
var stack = (err.stack || '').split('\n').slice(0, -1);
|
||||
stack.push(msg);
|
||||
error.stack = stack.join('\n');
|
||||
error.message = msg;
|
||||
error.type = 'TransformError';
|
||||
error.lineNumber = 0;
|
||||
error.description = '';
|
||||
return error;
|
||||
}
|
||||
|
||||
function formatBabelError(err, filename) {
|
||||
var error = new TransformError();
|
||||
error.type = 'TransformError';
|
||||
error.message = (err.type || error.type) + ' ' + err.message;
|
||||
error.stack = err.stack;
|
||||
error.snippet = err.codeFrame;
|
||||
error.lineNumber = err.loc.line;
|
||||
error.column = err.loc.column;
|
||||
error.filename = filename;
|
||||
error.description = err.message;
|
||||
return error;
|
||||
}
|
||||
|
||||
module.exports = Transformer;
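A hedged wiring sketch for the class above (the paths, worker count, and transform options are placeholders, not values taken from the bundler):

const Transformer = require('./JStransformer'); // hypothetical path to the module above

const transformer = new Transformer(
  '/abs/path/to/transformModule.js', // must be absolute (see the invariant in the constructor)
  4,                                 // maxWorkerCount
  {
    stdoutChunk: chunk => process.stdout.write(chunk), // forward worker stdout
    stderrChunk: chunk => process.stderr.write(chunk), // forward worker stderr
  },
  null,                              // workerPath: fall back to require.resolve('./worker')
);

transformer
  .transformFile('/abs/path/File.js', 'path/File.js', 'module.exports = 1;', {dev: true})
  .then(result => console.log(result.code))                  // worker result (code, map, dependencies, ...)
  .catch(error => console.error(error.type, error.message))
  .then(() => transformer.kill());                           // shut the worker farm down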
|
|
@ -1,19 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const UGLIFY_JS_OUTPUT_OPTIONS = {
|
||||
ascii_only: true,
|
||||
screw_ie8: true,
|
||||
};
|
||||
|
||||
module.exports = {UGLIFY_JS_OUTPUT_OPTIONS};
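A hedged sketch of how these flags would plausibly be threaded into UglifyJS; the minify test further down only asserts fromString/inSourceMap/outSourceMap, so the exact placement of UGLIFY_JS_OUTPUT_OPTIONS here is an assumption:

const uglify = require('uglify-js');
const {UGLIFY_JS_OUTPUT_OPTIONS} = require('./JsMinification'); // hypothetical path

function minifySketch(filename, code, sourceMap) {
  const result = uglify.minify(code, {
    fromString: true,                 // treat `code` as source text, not a file name
    inSourceMap: sourceMap,           // chain the incoming map
    outSourceMap: true,               // ask uglify to emit a map
    output: UGLIFY_JS_OUTPUT_OPTIONS, // ascii_only / screw_ie8 from above (assumed placement)
  });
  // mirror the behaviour asserted in the minify test: re-point `sources` at the file name
  const map = Object.assign({}, JSON.parse(result.map), {sources: [filename]});
  return {code: result.code, map};
}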
|
|
@ -1,122 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest.disableAutomock();
|
||||
const babel = require('babel-core');
|
||||
const constantFolding = require('../constant-folding');
|
||||
|
||||
function parse(code) {
|
||||
return babel.transform(code, {code: false, babelrc: false, compact: true});
|
||||
}
|
||||
|
||||
const babelOptions = {
|
||||
babelrc: false,
|
||||
compact: true,
|
||||
retainLines: false,
|
||||
};
|
||||
|
||||
function normalize({code}) {
|
||||
return babel.transform(code, babelOptions).code;
|
||||
}
|
||||
|
||||
describe('constant expressions', () => {
|
||||
it('can optimize conditional expressions with constant conditions', () => {
|
||||
const code = `
|
||||
a(
|
||||
'production'=="production",
|
||||
'production'!=='development',
|
||||
false && 1 || 0 || 2,
|
||||
true || 3,
|
||||
'android'==='ios' ? null : {},
|
||||
'android'==='android' ? {a:1} : {a:0},
|
||||
'foo'==='bar' ? b : c,
|
||||
f() ? g() : h()
|
||||
);`;
|
||||
expect(normalize(constantFolding('arbitrary.js', parse(code))))
|
||||
.toEqual('a(true,true,2,true,{},{a:1},c,f()?g():h());');
|
||||
});
|
||||
|
||||
it('can optimize ternary expressions with constant conditions', () => {
|
||||
const code =
|
||||
`var a = true ? 1 : 2;
|
||||
var b = 'android' == 'android'
|
||||
? ('production' != 'production' ? 'a' : 'A')
|
||||
: 'i';`;
|
||||
expect(normalize(constantFolding('arbitrary.js', parse(code))))
|
||||
.toEqual('var a=1;var b=\'A\';');
|
||||
});
|
||||
|
||||
it('can optimize logical operator expressions with constant conditions', () => {
|
||||
const code = `
|
||||
var a = true || 1;
|
||||
var b = 'android' == 'android' &&
|
||||
'production' != 'production' || null || "A";`;
|
||||
expect(normalize(constantFolding('arbitrary.js', parse(code))))
|
||||
.toEqual('var a=true;var b="A";');
|
||||
});
|
||||
|
||||
it('can optimize logical operators with partly constant operands', () => {
|
||||
const code = `
|
||||
var a = "truthy" || z();
|
||||
var b = "truthy" && z();
|
||||
var c = null && z();
|
||||
var d = null || z();
|
||||
var e = !1 && z();
|
||||
`;
|
||||
expect(normalize(constantFolding('arbitrary.js', parse(code))))
|
||||
.toEqual('var a="truthy";var b=z();var c=null;var d=z();var e=false;');
|
||||
});
|
||||
|
||||
it('can remove an if statement with a falsy constant test', () => {
|
||||
const code = `
|
||||
if ('production' === 'development' || false) {
|
||||
var a = 1;
|
||||
}
|
||||
`;
|
||||
expect(normalize(constantFolding('arbitrary.js', parse(code))))
|
||||
.toEqual('');
|
||||
});
|
||||
|
||||
it('can optimize if-else-branches with constant conditions', () => {
|
||||
const code = `
|
||||
if ('production' == 'development') {
|
||||
var a = 1;
|
||||
var b = a + 2;
|
||||
} else if ('development' == 'development') {
|
||||
var a = 3;
|
||||
var b = a + 4;
|
||||
} else {
|
||||
var a = 'b';
|
||||
}
|
||||
`;
|
||||
expect(normalize(constantFolding('arbitrary.js', parse(code))))
|
||||
.toEqual('{var a=3;var b=a+4;}');
|
||||
});
|
||||
|
||||
it('can optimize nested if-else constructs', () => {
|
||||
const code = `
|
||||
if ('ios' === "android") {
|
||||
if (true) {
|
||||
require('a');
|
||||
} else {
|
||||
require('b');
|
||||
}
|
||||
} else if ('android' === 'android') {
|
||||
if (true) {
|
||||
require('c');
|
||||
} else {
|
||||
require('d');
|
||||
}
|
||||
}
|
||||
`;
|
||||
expect(normalize(constantFolding('arbitrary.js', parse(code))))
|
||||
.toEqual('{{require(\'c\');}}');
|
||||
});
|
||||
});
|
|
@ -1,112 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest.disableAutomock();
|
||||
|
||||
const extractDependencies = require('../extract-dependencies');
|
||||
|
||||
describe('Dependency extraction:', () => {
|
||||
it('can extract calls to require', () => {
|
||||
const code = `require('foo/bar');
|
||||
var React = require("React");
|
||||
var A = React.createClass({
|
||||
render: function() {
|
||||
return require ( "Component" );
|
||||
}
|
||||
});
|
||||
require
|
||||
('more');`;
|
||||
const {dependencies, dependencyOffsets} = extractDependencies(code);
|
||||
expect(dependencies)
|
||||
.toEqual(['foo/bar', 'React', 'Component', 'more']);
|
||||
expect(dependencyOffsets).toEqual([8, 46, 147, 203]);
|
||||
});
|
||||
|
||||
it('does not extract require method calls', () => {
|
||||
const code = `
|
||||
require('a');
|
||||
foo.require('b');
|
||||
bar.
|
||||
require ( 'c').require('d');require('e')`;
|
||||
|
||||
const {dependencies, dependencyOffsets} = extractDependencies(code);
|
||||
expect(dependencies).toEqual(['a', 'e']);
|
||||
expect(dependencyOffsets).toEqual([15, 98]);
|
||||
});
|
||||
|
||||
it('does not extract require calls from strings', () => {
|
||||
const code = `require('foo');
|
||||
var React = '\\'require("React")';
|
||||
var a = ' // require("yadda")';
|
||||
var a = ' /* require("yadda") */';
|
||||
var A = React.createClass({
|
||||
render: function() {
|
||||
return require ( "Component" );
|
||||
}
|
||||
});
|
||||
" \\" require('more')";`;
|
||||
|
||||
const {dependencies, dependencyOffsets} = extractDependencies(code);
|
||||
expect(dependencies).toEqual(['foo', 'Component']);
|
||||
expect(dependencyOffsets).toEqual([8, 226]);
|
||||
});
|
||||
|
||||
it('does not extract require calls in comments', () => {
|
||||
const code = `require('foo')//require("not/this")
|
||||
/* A comment here with a require('call') that should not be extracted */require('bar')
|
||||
// ending comment without newline require("baz")`;
|
||||
|
||||
const {dependencies, dependencyOffsets} = extractDependencies(code);
|
||||
expect(dependencies).toEqual(['foo', 'bar']);
|
||||
expect(dependencyOffsets).toEqual([8, 122]);
|
||||
});
|
||||
|
||||
it('deduplicates dependencies', () => {
|
||||
const code = `require('foo');require( "foo" );
|
||||
require("foo");`;
|
||||
|
||||
const {dependencies, dependencyOffsets} = extractDependencies(code);
|
||||
expect(dependencies).toEqual(['foo']);
|
||||
expect(dependencyOffsets).toEqual([8, 24, 47]);
|
||||
});
|
||||
|
||||
it('does not extract calls to functions with names that start with "require"', () => {
|
||||
const code = 'arbitraryrequire(\'foo\');';
|
||||
|
||||
const {dependencies, dependencyOffsets} = extractDependencies(code);
|
||||
expect(dependencies).toEqual([]);
|
||||
expect(dependencyOffsets).toEqual([]);
|
||||
});
|
||||
|
||||
it('does not extract calls to require with non-static arguments', () => {
|
||||
const code = 'require(\'foo/\' + bar)';
|
||||
|
||||
const {dependencies, dependencyOffsets} = extractDependencies(code);
|
||||
expect(dependencies).toEqual([]);
|
||||
expect(dependencyOffsets).toEqual([]);
|
||||
});
|
||||
|
||||
it('does not get confused by previous states', () => {
|
||||
// yes, this was a bug
|
||||
const code = 'require("a");/* a comment */ var a = /[a]/.test(\'a\');';
|
||||
|
||||
const {dependencies, dependencyOffsets} = extractDependencies(code);
|
||||
expect(dependencies).toEqual(['a']);
|
||||
expect(dependencyOffsets).toEqual([8]);
|
||||
});
|
||||
|
||||
it('can handle regular expressions', () => {
|
||||
const code = 'require(\'a\'); /["\']/.test(\'foo\'); require("b");';
|
||||
|
||||
const {dependencies, dependencyOffsets} = extractDependencies(code);
|
||||
expect(dependencies).toEqual(['a', 'b']);
|
||||
expect(dependencyOffsets).toEqual([8, 42]);
|
||||
});
|
||||
});
|
|
@ -1,337 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
/* eslint-disable max-len */
|
||||
|
||||
jest.disableAutomock();
|
||||
const inline = require('../inline');
|
||||
const {transform, transformFromAst} = require('babel-core');
|
||||
|
||||
const babelOptions = {
|
||||
babelrc: false,
|
||||
compact: true,
|
||||
};
|
||||
|
||||
function toString(ast) {
|
||||
return normalize(transformFromAst(ast, babelOptions).code);
|
||||
}
|
||||
|
||||
function normalize(code) {
|
||||
return transform(code, babelOptions).code;
|
||||
}
|
||||
|
||||
function toAst(code) {
|
||||
return transform(code, {...babelOptions, code: false}).ast;
|
||||
}
|
||||
|
||||
describe('inline constants', () => {
|
||||
it('replaces __DEV__ in the code', () => {
|
||||
const code = `function a() {
|
||||
var a = __DEV__ ? 1 : 2;
|
||||
var b = a.__DEV__;
|
||||
var c = function __DEV__(__DEV__) {};
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {dev: true});
|
||||
expect(toString(ast)).toEqual(normalize(code.replace(/__DEV__/, 'true')));
|
||||
});
|
||||
|
||||
it('replaces Platform.OS in the code if Platform is a global', () => {
|
||||
const code = `function a() {
|
||||
var a = Platform.OS;
|
||||
var b = a.Platform.OS;
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'ios'});
|
||||
expect(toString(ast)).toEqual(normalize(code.replace(/Platform\.OS/, '"ios"')));
|
||||
});
|
||||
|
||||
it('replaces Platform.OS in the code if Platform is a top level import', () => {
|
||||
const code = `
|
||||
var Platform = require('Platform');
|
||||
function a() {
|
||||
if (Platform.OS === 'android') a = function() {};
|
||||
var b = a.Platform.OS;
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'ios'});
|
||||
expect(toString(ast)).toEqual(normalize(code.replace(/Platform\.OS/, '"ios"')));
|
||||
});
|
||||
|
||||
it('replaces Platform.OS in the code if Platform is a top level import from react-native', () => {
|
||||
const code = `
|
||||
var Platform = require('react-native').Platform;
|
||||
function a() {
|
||||
if (Platform.OS === 'android') a = function() {};
|
||||
var b = a.Platform.OS;
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'ios'});
|
||||
expect(toString(ast)).toEqual(normalize(code.replace(/Platform\.OS/, '"ios"')));
|
||||
});
|
||||
|
||||
it('replaces require("Platform").OS in the code', () => {
|
||||
const code = `function a() {
|
||||
var a = require('Platform').OS;
|
||||
var b = a.require('Platform').OS;
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'android'});
|
||||
expect(toString(ast)).toEqual(
|
||||
normalize(code.replace(/require\('Platform'\)\.OS/, '"android"')));
|
||||
});
|
||||
|
||||
it('replaces React.Platform.OS in the code if React is a global', () => {
|
||||
const code = `function a() {
|
||||
var a = React.Platform.OS;
|
||||
var b = a.React.Platform.OS;
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'ios'});
|
||||
expect(toString(ast)).toEqual(normalize(code.replace(/React\.Platform\.OS/, '"ios"')));
|
||||
});
|
||||
|
||||
it('replaces ReactNative.Platform.OS in the code if ReactNative is a global', () => {
|
||||
const code = `function a() {
|
||||
var a = ReactNative.Platform.OS;
|
||||
var b = a.ReactNative.Platform.OS;
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'ios'});
|
||||
expect(toString(ast)).toEqual(normalize(code.replace(/ReactNative\.Platform\.OS/, '"ios"')));
|
||||
});
|
||||
|
||||
it('replaces React.Platform.OS in the code if React is a top level import', () => {
|
||||
const code = `
|
||||
var React = require('React');
|
||||
function a() {
|
||||
if (React.Platform.OS === 'android') a = function() {};
|
||||
var b = a.React.Platform.OS;
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'ios'});
|
||||
expect(toString(ast)).toEqual(normalize(code.replace(/React.Platform\.OS/, '"ios"')));
|
||||
});
|
||||
|
||||
it('replaces require("React").Platform.OS in the code', () => {
|
||||
const code = `function a() {
|
||||
var a = require('React').Platform.OS;
|
||||
var b = a.require('React').Platform.OS;
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'android'});
|
||||
expect(toString(ast)).toEqual(
|
||||
normalize(code.replace(/require\('React'\)\.Platform\.OS/, '"android"')));
|
||||
});
|
||||
|
||||
it('replaces ReactNative.Platform.OS in the code if ReactNative is a top level import', () => {
|
||||
const code = `
|
||||
var ReactNative = require('react-native');
|
||||
function a() {
|
||||
if (ReactNative.Platform.OS === 'android') a = function() {};
|
||||
var b = a.ReactNative.Platform.OS;
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'android'});
|
||||
expect(toString(ast)).toEqual(normalize(code.replace(/ReactNative.Platform\.OS/, '"android"')));
|
||||
});
|
||||
|
||||
it('replaces require("react-native").Platform.OS in the code', () => {
|
||||
const code = `function a() {
|
||||
var a = require('react-native').Platform.OS;
|
||||
var b = a.require('react-native').Platform.OS;
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'android'});
|
||||
expect(toString(ast)).toEqual(
|
||||
normalize(code.replace(/require\('react-native'\)\.Platform\.OS/, '"android"')));
|
||||
});
|
||||
|
||||
it('inlines Platform.select in the code if Platform is a global and the argument is an object literal', () => {
|
||||
const code = `function a() {
|
||||
var a = Platform.select({ios: 1, android: 2});
|
||||
var b = a.Platform.select({ios: 1, android: 2});
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'ios'});
|
||||
expect(toString(ast)).toEqual(normalize(code.replace(/Platform\.select[^;]+/, '1')));
|
||||
});
|
||||
|
||||
it('inlines Platform.select in the code if Platform is a global and the argument doesn\'t have the target platform in its keys', () => {
|
||||
const code = `function a() {
|
||||
var a = Platform.select({ios: 1, default: 2});
|
||||
var b = a.Platform.select({ios: 1, default: 2});
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'android'});
|
||||
expect(toString(ast)).toEqual(normalize(code.replace(/Platform\.select[^;]+/, '2')));
|
||||
});
|
||||
|
||||
it('replaces Platform.select in the code if Platform is a top level import', () => {
|
||||
const code = `
|
||||
var Platform = require('Platform');
|
||||
function a() {
|
||||
Platform.select({ios: 1, android: 2});
|
||||
var b = a.Platform.select({});
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'android'});
|
||||
expect(toString(ast)).toEqual(normalize(code.replace(/Platform\.select[^;]+/, '2')));
|
||||
});
|
||||
|
||||
it('replaces Platform.select in the code if Platform is a top level import from react-native', () => {
|
||||
const code = `
|
||||
var Platform = require('react-native').Platform;
|
||||
function a() {
|
||||
Platform.select({ios: 1, android: 2});
|
||||
var b = a.Platform.select({});
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'ios'});
|
||||
expect(toString(ast)).toEqual(normalize(code.replace(/Platform\.select[^;]+/, '1')));
|
||||
});
|
||||
|
||||
it('replaces require("Platform").select in the code', () => {
|
||||
const code = `function a() {
|
||||
var a = require('Platform').select({ios: 1, android: 2});
|
||||
var b = a.require('Platform').select({});
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'android'});
|
||||
expect(toString(ast)).toEqual(normalize(code.replace(/Platform\.select[^;]+/, '2')));
|
||||
});
|
||||
|
||||
it('replaces React.Platform.select in the code if React is a global', () => {
|
||||
const code = `function a() {
|
||||
var a = React.Platform.select({ios: 1, android: 2});
|
||||
var b = a.React.Platform.select({});
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'ios'});
|
||||
expect(toString(ast)).toEqual(normalize(code.replace(/React\.Platform\.select[^;]+/, '1')));
|
||||
});
|
||||
|
||||
it('replaces ReactNative.Platform.select in the code if ReactNative is a global', () => {
|
||||
const code = `function a() {
|
||||
var a = ReactNative.Platform.select({ios: 1, android: 2});
|
||||
var b = a.ReactNative.Platform.select({});
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'ios'});
|
||||
expect(toString(ast)).toEqual(
|
||||
normalize(code.replace(/ReactNative\.Platform\.select[^;]+/, '1')),
|
||||
);
|
||||
});
|
||||
|
||||
it('replaces React.Platform.select in the code if React is a top level import', () => {
|
||||
const code = `
|
||||
var React = require('React');
|
||||
function a() {
|
||||
var a = React.Platform.select({ios: 1, android: 2});
|
||||
var b = a.React.Platform.select({});
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'ios'});
|
||||
expect(toString(ast)).toEqual(normalize(code.replace(/React\.Platform\.select[^;]+/, '1')));
|
||||
});
|
||||
|
||||
it('replaces require("React").Platform.select in the code', () => {
|
||||
const code = `function a() {
|
||||
var a = require('React').Platform.select({ios: 1, android: 2});
|
||||
var b = a.require('React').Platform.select({});
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'android'});
|
||||
expect(toString(ast)).toEqual(
|
||||
normalize(code.replace(/require\('React'\)\.Platform\.select[^;]+/, '2')));
|
||||
});
|
||||
|
||||
it('replaces ReactNative.Platform.select in the code if ReactNative is a top level import', () => {
|
||||
const code = `
|
||||
var ReactNative = require('react-native');
|
||||
function a() {
|
||||
var a = ReactNative.Platform.select({ios: 1, android: 2});
|
||||
var b = a.ReactNative.Platform.select;
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'android'});
|
||||
expect(toString(ast)).toEqual(
|
||||
normalize(code.replace(/ReactNative.Platform\.select[^;]+/, '2')),
|
||||
);
|
||||
});
|
||||
|
||||
it('replaces require("react-native").Platform.select in the code', () => {
|
||||
const code = `
|
||||
var a = require('react-native').Platform.select({ios: 1, android: 2});
|
||||
var b = a.require('react-native').Platform.select({});
|
||||
`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'android'});
|
||||
expect(toString(ast)).toEqual(
|
||||
normalize(code.replace(/require\('react-native'\)\.Platform\.select[^;]+/, '2')));
|
||||
});
|
||||
|
||||
it('replaces non-existing properties with `undefined`', () => {
|
||||
const code = 'var a = Platform.select({ios: 1, android: 2})';
|
||||
const {ast} = inline('arbitrary.js', {code}, {platform: 'doesnotexist'});
|
||||
expect(toString(ast)).toEqual(
|
||||
normalize(code.replace(/Platform\.select[^;]+/, 'undefined')));
|
||||
});
|
||||
|
||||
it('replaces process.env.NODE_ENV in the code', () => {
|
||||
const code = `function a() {
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
return require('Prod');
|
||||
}
|
||||
return require('Dev');
|
||||
}`;
|
||||
const {ast} = inline('arbitrary.js', {code}, {dev: false});
|
||||
expect(toString(ast)).toEqual(
|
||||
normalize(code.replace(/process\.env\.NODE_ENV/, '"production"')));
|
||||
});
|
||||
|
||||
it('accepts an AST as input', function() {
|
||||
const code = 'function ifDev(a,b){return __DEV__?a:b;}';
|
||||
const {ast} = inline('arbitrary.hs', {ast: toAst(code)}, {dev: false});
|
||||
expect(toString(ast)).toEqual(code.replace(/__DEV__/, 'false'));
|
||||
});
|
||||
|
||||
it('can work with wrapped modules', () => {
|
||||
const code = `__arbitrary(function() {
|
||||
var Platform = require('react-native').Platform;
|
||||
var a = Platform.OS, b = Platform.select({android: 1, ios: 2});
|
||||
});`;
|
||||
const {ast} = inline(
|
||||
'arbitrary', {code}, {dev: true, platform: 'android', isWrapped: true});
|
||||
expect(toString(ast)).toEqual(
|
||||
normalize(
|
||||
code
|
||||
.replace(/Platform\.OS/, '"android"')
|
||||
.replace(/Platform\.select[^)]+\)/, 1)
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
it('can work with transformed require calls', () => {
|
||||
const code = `__arbitrary(require, function(arbitraryMapName) {
|
||||
var a = require(arbitraryMapName[123], 'react-native').Platform.OS;
|
||||
});`;
|
||||
const {ast} = inline(
|
||||
'arbitrary', {code}, {dev: true, platform: 'android', isWrapped: true});
|
||||
expect(toString(ast)).toEqual(
|
||||
normalize(code.replace(/require\([^)]+\)\.Platform\.OS/, '"android"')));
|
||||
});
|
||||
|
||||
it('works with flow-declared variables', () => {
|
||||
const stripFlow = require('babel-plugin-transform-flow-strip-types');
|
||||
const code = `declare var __DEV__;
|
||||
const a: boolean = __DEV__;`;
|
||||
|
||||
const transformed = transform(
|
||||
code,
|
||||
{...babelOptions, plugins: [stripFlow, [inline.plugin, {dev: false}]]},
|
||||
).code;
|
||||
|
||||
expect(transformed).toEqual('const a=false;');
|
||||
});
|
||||
|
||||
it('works with flow-declared variables in wrapped modules', () => {
|
||||
const stripFlow = require('babel-plugin-transform-flow-strip-types');
|
||||
const code = `__d(() => {
|
||||
declare var __DEV__;
|
||||
const a: boolean = __DEV__;
|
||||
});`;
|
||||
|
||||
const transformed = transform(
|
||||
code,
|
||||
{...babelOptions, plugins: [stripFlow, [inline.plugin, {dev: true}]]},
|
||||
).code;
|
||||
|
||||
expect(transformed).toEqual('__d(()=>{const a=true;});');
|
||||
});
|
||||
});
|
|
@ -1,57 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest.disableAutomock();
|
||||
|
||||
const uglify = {
|
||||
minify: jest.fn(code => {
|
||||
return {
|
||||
code: code.replace(/(^|\W)\s+/g, '$1'),
|
||||
map: {},
|
||||
};
|
||||
}),
|
||||
};
|
||||
jest.setMock('uglify-js', uglify);
|
||||
|
||||
const minify = require('../minify');
|
||||
const {objectContaining} = jasmine;
|
||||
|
||||
describe('Minification:', () => {
|
||||
const filename = '/arbitrary/file.js';
|
||||
const code = 'arbitrary(code)';
|
||||
let map;
|
||||
|
||||
beforeEach(() => {
|
||||
uglify.minify.mockClear();
|
||||
uglify.minify.mockReturnValue({code: '', map: '{}'});
|
||||
map = {version: 3, sources: ['?'], mappings: ''};
|
||||
});
|
||||
|
||||
it('passes file name, code, and source map to `uglify`', () => {
|
||||
minify(filename, code, map);
|
||||
expect(uglify.minify).toBeCalledWith(code, objectContaining({
|
||||
fromString: true,
|
||||
inSourceMap: map,
|
||||
outSourceMap: true,
|
||||
}));
|
||||
});
|
||||
|
||||
it('returns the code provided by uglify', () => {
|
||||
uglify.minify.mockReturnValue({code, map: '{}'});
|
||||
const result = minify('', '', {});
|
||||
expect(result.code).toBe(code);
|
||||
});
|
||||
|
||||
it('parses the source map object provided by uglify and sets the sources property', () => {
|
||||
uglify.minify.mockReturnValue({map: JSON.stringify(map), code: ''});
|
||||
const result = minify(filename, '', {});
|
||||
expect(result.map).toEqual({...map, sources: [filename]});
|
||||
});
|
||||
});
|
|
@ -1,226 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest.disableAutomock();
|
||||
jest.mock('../constant-folding');
|
||||
jest.mock('../extract-dependencies');
|
||||
jest.mock('../inline');
|
||||
jest.mock('../minify');
|
||||
|
||||
const {objectContaining} = jasmine;
|
||||
|
||||
describe('code transformation worker:', () => {
|
||||
let transformCode;
|
||||
|
||||
let extractDependencies, transformer;
|
||||
beforeEach(() => {
|
||||
jest.resetModules();
|
||||
({transformCode} = require('..'));
|
||||
extractDependencies =
|
||||
require('../extract-dependencies').mockReturnValue({});
|
||||
transformer = {
|
||||
transform: jest.fn(({filename, options, src}) => ({
|
||||
code: src,
|
||||
map: {},
|
||||
})),
|
||||
};
|
||||
});
|
||||
|
||||
it('calls the transform with file name, source code, and transform options', function() {
|
||||
const filename = 'arbitrary/file.js';
|
||||
const localPath = `local/${filename}`;
|
||||
const sourceCode = 'arbitrary(code)';
|
||||
const transformOptions = {arbitrary: 'options'};
|
||||
transformCode(transformer, filename, localPath, sourceCode, {transform: transformOptions}, () => {});
|
||||
expect(transformer.transform).toBeCalledWith({
|
||||
filename,
|
||||
localPath,
|
||||
options: transformOptions,
|
||||
src: sourceCode,
|
||||
});
|
||||
});
|
||||
|
||||
it('prefixes JSON files with an assignment to module.exports to make the code valid', function() {
|
||||
const filename = 'arbitrary/file.json';
|
||||
const localPath = `local/${filename}`;
|
||||
const sourceCode = '{"arbitrary":"property"}';
|
||||
transformCode(transformer, filename, localPath, sourceCode, {}, () => {});
|
||||
expect(transformer.transform).toBeCalledWith({
|
||||
filename,
|
||||
localPath,
|
||||
options: undefined,
|
||||
src: `module.exports=${sourceCode}`,
|
||||
});
|
||||
});
|
||||
|
||||
it('calls back with the result of the transform in the cache', done => {
|
||||
const result = {
|
||||
code: 'some.other(code)',
|
||||
map: {},
|
||||
};
|
||||
|
||||
transformCode(transformer, 'filename', 'local/filename', result.code, {}, (error, data) => {
|
||||
expect(error).toBeNull();
|
||||
expect(data.result).toEqual(objectContaining(result));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it(
|
||||
'removes the leading assignment to `module.exports` before passing ' +
|
||||
'on the result if the file is a JSON file, even if minified',
|
||||
done => {
|
||||
const code = '{a:1,b:2}';
|
||||
const filePath = 'arbitrary/file.json';
|
||||
transformCode(transformer, filePath, filePath, code, {}, (error, data) => {
|
||||
expect(error).toBeNull();
|
||||
expect(data.result.code).toEqual(code);
|
||||
done();
|
||||
},
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
it('removes shebang when present', done => {
|
||||
const shebang = '#!/usr/bin/env node';
|
||||
const result = {
|
||||
code: `${shebang} \n arbitrary(code)`,
|
||||
};
|
||||
const filePath = 'arbitrary/file.js';
|
||||
transformCode(transformer, filePath, filePath, result.code, {}, (error, data) => {
|
||||
expect(error).toBeNull();
|
||||
const {code} = data.result;
|
||||
expect(code).not.toContain(shebang);
|
||||
expect(code.split('\n').length).toEqual(result.code.split('\n').length);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('calls back with any error yielded by the transform', done => {
|
||||
const message = 'SyntaxError: this code is broken.';
|
||||
transformer.transform.mockImplementation(() => {
|
||||
throw new Error(message);
|
||||
});
|
||||
|
||||
transformCode(transformer, 'filename', 'local/filename', 'code', {}, error => {
|
||||
expect(error.message).toBe(message);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
describe('dependency extraction', () => {
|
||||
it('passes the transformed code to `extractDependencies`', done => {
|
||||
const code = 'arbitrary(code)';
|
||||
|
||||
transformCode(transformer, 'filename', 'local/filename', code, {}, error => {
|
||||
expect(error).toBeNull();
|
||||
expect(extractDependencies).toBeCalledWith(code);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it(
|
||||
'uses `dependencies` and `dependencyOffsets` ' +
|
||||
'provided by `extractDependencies` for the result',
|
||||
done => {
|
||||
const dependencyData = {
|
||||
dependencies: ['arbitrary', 'list', 'of', 'dependencies'],
|
||||
dependencyOffsets: [12, 119, 185, 328, 471],
|
||||
};
|
||||
extractDependencies.mockReturnValue(dependencyData);
|
||||
|
||||
transformCode(transformer, 'filename', 'local/filename', 'code', {}, (error, data) => {
|
||||
expect(error).toBeNull();
|
||||
expect(data.result).toEqual(objectContaining(dependencyData));
|
||||
done();
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
it('does not extract requires of JSON files', done => {
|
||||
const jsonStr = '{"arbitrary":"json"}';
|
||||
transformCode(transformer, 'arbitrary.json', 'local/arbitrary.json', jsonStr, {}, (error, data) => {
|
||||
expect(error).toBeNull();
|
||||
const {dependencies, dependencyOffsets} = data.result;
|
||||
expect(extractDependencies).not.toBeCalled();
|
||||
expect(dependencies).toEqual([]);
|
||||
expect(dependencyOffsets).toEqual([]);
|
||||
done();
|
||||
}
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Minifications:', () => {
|
||||
let constantFolding, inline, options;
|
||||
let transformResult, dependencyData;
|
||||
const filename = 'arbitrary/file.js';
|
||||
const foldedCode = 'arbitrary(folded(code));';
|
||||
const foldedMap = {version: 3, sources: ['fold.js']};
|
||||
|
||||
beforeEach(() => {
|
||||
constantFolding = require('../constant-folding')
|
||||
.mockReturnValue({code: foldedCode, map: foldedMap});
|
||||
extractDependencies = require('../extract-dependencies');
|
||||
inline = require('../inline');
|
||||
|
||||
options = {minify: true, transform: {generateSourceMaps: true}};
|
||||
dependencyData = {
|
||||
dependencies: ['a', 'b', 'c'],
|
||||
dependencyOffsets: [100, 120, 140],
|
||||
};
|
||||
|
||||
extractDependencies.mockImplementation(
|
||||
code => code === foldedCode ? dependencyData : {});
|
||||
|
||||
transformer.transform.mockImplementation((src, fileName, _) => transformResult);
|
||||
});
|
||||
|
||||
it('passes the transform result to `inline` for constant inlining', done => {
|
||||
transformResult = {map: {version: 3}, code: 'arbitrary(code)'};
|
||||
transformCode(transformer, filename, filename, 'code', options, () => {
|
||||
expect(inline).toBeCalledWith(filename, transformResult, options);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('passes the result obtained from `inline` on to `constant-folding`', done => {
|
||||
const inlineResult = {map: {version: 3, sources: []}, ast: {}};
|
||||
inline.mockReturnValue(inlineResult);
|
||||
transformCode(transformer, filename, filename, 'code', options, () => {
|
||||
expect(constantFolding).toBeCalledWith(filename, inlineResult);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('uses the code obtained from `constant-folding` to extract dependencies', done => {
|
||||
transformCode(transformer, filename, filename, 'code', options, () => {
|
||||
expect(extractDependencies).toBeCalledWith(foldedCode);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('uses the dependencies obtained from the optimized result', done => {
|
||||
transformCode(transformer, filename, filename, 'code', options, (_, data) => {
|
||||
const result = data.result;
|
||||
expect(result.dependencies).toEqual(dependencyData.dependencies);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('uses data produced by `constant-folding` for the result', done => {
|
||||
transformCode(transformer, 'filename', 'local/filename', 'code', options, (_, data) => {
|
||||
expect(data.result)
|
||||
.toEqual(objectContaining({code: foldedCode, map: foldedMap}));
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,91 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const babel = require('babel-core');
|
||||
|
||||
import type {Ast, SourceMap as MappingsMap} from 'babel-core';
|
||||
const t = babel.types;
|
||||
|
||||
const Conditional = {
|
||||
exit(path) {
|
||||
const node = path.node;
|
||||
const test = node.test;
|
||||
if (t.isLiteral(test)) {
|
||||
if (test.value || node.alternate) {
|
||||
path.replaceWith(test.value ? node.consequent : node.alternate);
|
||||
} else if (!test.value) {
|
||||
path.remove();
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
const plugin = {
|
||||
visitor: {
|
||||
BinaryExpression: {
|
||||
exit(path) {
|
||||
const node = path.node;
|
||||
if (t.isLiteral(node.left) && t.isLiteral(node.right)) {
|
||||
const result = path.evaluate();
|
||||
if (result.confident) {
|
||||
path.replaceWith(t.valueToNode(result.value));
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
ConditionalExpression: Conditional,
|
||||
IfStatement: Conditional,
|
||||
LogicalExpression: {
|
||||
exit(path) {
|
||||
const node = path.node;
|
||||
const left = node.left;
|
||||
if (t.isLiteral(left)) {
|
||||
const value = t.isNullLiteral(left) ? null : left.value;
|
||||
if (node.operator === '||') {
|
||||
path.replaceWith(value ? left : node.right);
|
||||
} else {
|
||||
path.replaceWith(value ? node.right : left);
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
UnaryExpression: {
|
||||
exit(path) {
|
||||
const node = path.node;
|
||||
if (node.operator === '!' && t.isLiteral(node.argument)) {
|
||||
path.replaceWith(t.valueToNode(!node.argument.value));
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
function constantFolding(filename: string, transformResult: {
|
||||
ast: Ast,
|
||||
code?: ?string,
|
||||
map: ?MappingsMap,
|
||||
}) {
|
||||
return babel.transformFromAst(transformResult.ast, transformResult.code, {
|
||||
filename,
|
||||
plugins: [plugin],
|
||||
inputSourceMap: transformResult.map,
|
||||
sourceMaps: true,
|
||||
sourceFileName: filename,
|
||||
babelrc: false,
|
||||
compact: true,
|
||||
retainLines: true,
|
||||
});
|
||||
}
|
||||
|
||||
constantFolding.plugin = plugin;
|
||||
module.exports = constantFolding;
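// Illustration only (not part of the original file): once `inline` has replaced
// __DEV__ / Platform.OS with literals, this plugin evaluates the resulting
// constant expressions, for example:
//
//   'android' === 'ios' ? require('a') : require('b')   -->  require('b')
//   if (!true) { setUpDevTools(); }                      -->  (statement removed)
//   false && expensiveCheck()                            -->  false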
|
|
@ -1,52 +0,0 @@
|
|||
/**
 * Copyright (c) 2016-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @flow
 */

'use strict';

const babel = require('babel-core');
const babylon = require('babylon');

/**
 * Extracts dependencies (module IDs imported with the `require` function) from
 * a string containing code. This walks the full AST for correctness (versus
 * using, for example, regular expressions, which would be faster but inexact).
 *
 * The result of the dependency extraction is a de-duplicated array of
 * dependencies, and an array of offsets to the string literals with module IDs.
 * Each offset points to the opening quote of the corresponding literal.
 */
function extractDependencies(code: string) {
  const ast = babylon.parse(code);
  const dependencies = new Set();
  const dependencyOffsets = [];

  babel.traverse(ast, {
    CallExpression(path) {
      const node = path.node;
      const callee = node.callee;
      const arg = node.arguments[0];
      if (
        callee.type !== 'Identifier' ||
        callee.name !== 'require' ||
        !arg ||
        arg.type !== 'StringLiteral'
      ) {
        return;
      }
      dependencyOffsets.push(arg.start);
      dependencies.add(arg.value);
    },
  });

  return {dependencyOffsets, dependencies: Array.from(dependencies)};
}

module.exports = extractDependencies;
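// Usage sketch (added for illustration; the input string is hypothetical):
//
//   const {dependencies, dependencyOffsets} =
//     extractDependencies("const a = require('react');\nrequire('./b');");
//   // dependencies      --> ['react', './b']
//   // dependencyOffsets --> [18, 36]  (positions of the opening quotes)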
|
|
@ -1,191 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const babelRegisterOnly = require('../../babelRegisterOnly');
|
||||
const constantFolding = require('./constant-folding');
|
||||
const extractDependencies = require('./extract-dependencies');
|
||||
const inline = require('./inline');
|
||||
const invariant = require('fbjs/lib/invariant');
|
||||
const minify = require('./minify');
|
||||
|
||||
import type {LogEntry} from '../../Logger/Types';
|
||||
import type {MappingsMap} from '../../lib/SourceMap';
|
||||
import type {LocalPath} from '../../node-haste/lib/toLocalPath';
|
||||
import type {Ast, Plugins as BabelPlugins} from 'babel-core';
|
||||
|
||||
export type TransformedCode = {
|
||||
code: string,
|
||||
dependencies: Array<string>,
|
||||
dependencyOffsets: Array<number>,
|
||||
map?: ?MappingsMap,
|
||||
};
|
||||
|
||||
export type Transformer<ExtraOptions: {} = {}> = {
|
||||
transform: ({|
|
||||
filename: string,
|
||||
localPath: string,
|
||||
options: ExtraOptions & TransformOptions,
|
||||
plugins?: BabelPlugins,
|
||||
src: string,
|
||||
|}) => {ast: ?Ast, code: string, map: ?MappingsMap},
|
||||
getCacheKey: () => string,
|
||||
};
|
||||
|
||||
|
||||
export type TransformOptionsStrict = {|
|
||||
+dev: boolean,
|
||||
+generateSourceMaps: boolean,
|
||||
+hot: boolean,
|
||||
+inlineRequires: {+blacklist: {[string]: true}} | boolean,
|
||||
+platform: ?string,
|
||||
+projectRoot: string,
|
||||
|};
|
||||
|
||||
export type TransformOptions = {
|
||||
+dev?: boolean,
|
||||
+generateSourceMaps?: boolean,
|
||||
+hot?: boolean,
|
||||
+inlineRequires?: {+blacklist: {[string]: true}} | boolean,
|
||||
+platform: ?string,
|
||||
+projectRoot: string,
|
||||
};
|
||||
|
||||
export type Options = {|
|
||||
+dev: boolean,
|
||||
+minify: boolean,
|
||||
+platform: ?string,
|
||||
+transform: TransformOptionsStrict,
|
||||
|};
|
||||
|
||||
export type Data = {
|
||||
result: TransformedCode,
|
||||
transformFileStartLogEntry: LogEntry,
|
||||
transformFileEndLogEntry: LogEntry,
|
||||
};
|
||||
|
||||
type Callback<T> = (
|
||||
error: ?Error,
|
||||
data: ?T,
|
||||
) => mixed;
|
||||
|
||||
function transformCode(
|
||||
transformer: Transformer<*>,
|
||||
filename: string,
|
||||
localPath: LocalPath,
|
||||
sourceCode: string,
|
||||
options: Options,
|
||||
callback: Callback<Data>,
|
||||
) {
|
||||
invariant(
|
||||
!options.minify || options.transform.generateSourceMaps,
|
||||
'Minifying source code requires the `generateSourceMaps` option to be `true`',
|
||||
);
|
||||
|
||||
const isJson = filename.endsWith('.json');
|
||||
if (isJson) {
|
||||
sourceCode = 'module.exports=' + sourceCode;
|
||||
}
|
||||
|
||||
const transformFileStartLogEntry = {
|
||||
action_name: 'Transforming file',
|
||||
action_phase: 'start',
|
||||
file_name: filename,
|
||||
log_entry_label: 'Transforming file',
|
||||
start_timestamp: process.hrtime(),
|
||||
};
|
||||
|
||||
let transformed;
|
||||
try {
|
||||
transformed = transformer.transform({
|
||||
filename,
|
||||
localPath,
|
||||
options: options.transform,
|
||||
src: sourceCode,
|
||||
});
|
||||
} catch (error) {
|
||||
callback(error);
|
||||
return;
|
||||
}
|
||||
|
||||
invariant(
|
||||
transformed != null,
|
||||
'Missing transform results despite having no error.',
|
||||
);
|
||||
|
||||
var code, map;
|
||||
if (options.minify) {
|
||||
({code, map} =
|
||||
constantFolding(filename, inline(filename, transformed, options)));
|
||||
invariant(code != null, 'Missing code from constant-folding transform.');
|
||||
} else {
|
||||
({code, map} = transformed);
|
||||
}
|
||||
|
||||
if (isJson) {
|
||||
code = code.replace(/^\w+\.exports=/, '');
|
||||
} else {
|
||||
// Remove shebang
|
||||
code = code.replace(/^#!.*/, '');
|
||||
}
|
||||
|
||||
const depsResult = isJson
|
||||
? {dependencies: [], dependencyOffsets: []}
|
||||
: extractDependencies(code);
|
||||
|
||||
const timeDelta = process.hrtime(transformFileStartLogEntry.start_timestamp);
|
||||
const duration_ms = Math.round((timeDelta[0] * 1e9 + timeDelta[1]) / 1e6);
|
||||
const transformFileEndLogEntry = {
|
||||
action_name: 'Transforming file',
|
||||
action_phase: 'end',
|
||||
file_name: filename,
|
||||
duration_ms,
|
||||
log_entry_label: 'Transforming file',
|
||||
};
|
||||
|
||||
callback(null, {
|
||||
result: {...depsResult, code, map},
|
||||
transformFileStartLogEntry,
|
||||
transformFileEndLogEntry,
|
||||
});
|
||||
}
|
||||
|
||||
exports.transformAndExtractDependencies = (
|
||||
transform: string,
|
||||
filename: string,
|
||||
localPath: LocalPath,
|
||||
sourceCode: string,
|
||||
options: Options,
|
||||
callback: Callback<Data>,
|
||||
) => {
|
||||
babelRegisterOnly([transform]);
|
||||
/* $FlowFixMe: impossible to type a dynamic require */
|
||||
const transformModule: Transformer<*> = require(transform);
|
||||
transformCode(transformModule, filename, localPath, sourceCode, options, callback);
|
||||
};
|
||||
|
||||
exports.minify = (
|
||||
filename: string,
|
||||
code: string,
|
||||
sourceMap: MappingsMap,
|
||||
callback: Callback<{code: string, map: MappingsMap}>,
|
||||
) => {
|
||||
var result;
|
||||
try {
|
||||
result = minify(filename, code, sourceMap);
|
||||
} catch (error) {
|
||||
callback(error);
return;
|
||||
}
|
||||
callback(null, result);
|
||||
};
|
||||
|
||||
exports.transformCode = transformCode; // for easier testing
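// For reference (a sketch, not from the original file): the `options` argument
// threaded through `transformCode` has this shape, and the invariant at the top
// of `transformCode` means `minify: true` only works together with
// `transform.generateSourceMaps: true`:
//
//   const options = {
//     dev: false,
//     minify: true,
//     platform: 'ios',
//     transform: {
//       dev: false,
//       generateSourceMaps: true,
//       hot: false,
//       inlineRequires: false,
//       platform: 'ios',
//       projectRoot: '/path/to/project',
//     },
//   };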
|
|
@ -1,196 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const babel = require('babel-core');
|
||||
const invariant = require('fbjs/lib/invariant');
|
||||
|
||||
import type {Ast, SourceMap as MappingsMap} from 'babel-core';
|
||||
const t = babel.types;
|
||||
|
||||
const React = {name: 'React'};
|
||||
const ReactNative = {name: 'ReactNative'};
|
||||
const platform = {name: 'Platform'};
|
||||
const os = {name: 'OS'};
|
||||
const select = {name: 'select'};
|
||||
const requirePattern = {name: 'require'};
|
||||
|
||||
const env = {name: 'env'};
|
||||
const nodeEnv = {name: 'NODE_ENV'};
|
||||
const processId = {name: 'process'};
|
||||
|
||||
const dev = {name: '__DEV__'};
|
||||
|
||||
const importMap = new Map([['ReactNative', 'react-native']]);
|
||||
|
||||
const isGlobal = binding => !binding;
|
||||
|
||||
const isFlowDeclared = binding =>
|
||||
t.isDeclareVariable(binding.path);
|
||||
|
||||
const isGlobalOrFlowDeclared = binding =>
|
||||
isGlobal(binding) || isFlowDeclared(binding);
|
||||
|
||||
const isToplevelBinding = (binding, isWrappedModule) =>
|
||||
isGlobal(binding) ||
|
||||
!binding.scope.parent ||
|
||||
isWrappedModule && !binding.scope.parent.parent;
|
||||
|
||||
const isRequireCall = (node, dependencyId, scope) =>
|
||||
t.isCallExpression(node) &&
|
||||
t.isIdentifier(node.callee, requirePattern) &&
|
||||
checkRequireArgs(node.arguments, dependencyId);
|
||||
|
||||
const isImport = (node, scope, patterns) =>
|
||||
patterns.some(pattern => {
|
||||
const importName = importMap.get(pattern.name) || pattern.name;
|
||||
return isRequireCall(node, importName, scope);
|
||||
});
|
||||
|
||||
function isImportOrGlobal(node, scope, patterns, isWrappedModule) {
|
||||
const identifier = patterns.find(pattern => t.isIdentifier(node, pattern));
|
||||
return (
|
||||
identifier &&
|
||||
isToplevelBinding(scope.getBinding(identifier.name), isWrappedModule) ||
|
||||
isImport(node, scope, patterns)
|
||||
);
|
||||
}
|
||||
|
||||
const isPlatformOS = (node, scope, isWrappedModule) =>
|
||||
t.isIdentifier(node.property, os) &&
|
||||
isImportOrGlobal(node.object, scope, [platform], isWrappedModule);
|
||||
|
||||
const isReactPlatformOS = (node, scope, isWrappedModule) =>
|
||||
t.isIdentifier(node.property, os) &&
|
||||
t.isMemberExpression(node.object) &&
|
||||
t.isIdentifier(node.object.property, platform) &&
|
||||
isImportOrGlobal(
|
||||
node.object.object, scope, [React, ReactNative], isWrappedModule);
|
||||
|
||||
const isProcessEnvNodeEnv = (node, scope) =>
|
||||
t.isIdentifier(node.property, nodeEnv) &&
|
||||
t.isMemberExpression(node.object) &&
|
||||
t.isIdentifier(node.object.property, env) &&
|
||||
t.isIdentifier(node.object.object, processId) &&
|
||||
isGlobal(scope.getBinding(processId.name));
|
||||
|
||||
const isPlatformSelect = (node, scope, isWrappedModule) =>
|
||||
t.isMemberExpression(node.callee) &&
|
||||
t.isIdentifier(node.callee.object, platform) &&
|
||||
t.isIdentifier(node.callee.property, select) &&
|
||||
isImportOrGlobal(node.callee.object, scope, [platform], isWrappedModule);
|
||||
|
||||
const isReactPlatformSelect = (node, scope, isWrappedModule) =>
|
||||
t.isMemberExpression(node.callee) &&
|
||||
t.isIdentifier(node.callee.property, select) &&
|
||||
t.isMemberExpression(node.callee.object) &&
|
||||
t.isIdentifier(node.callee.object.property, platform) &&
|
||||
isImportOrGlobal(
|
||||
node.callee.object.object, scope, [React, ReactNative], isWrappedModule);
|
||||
|
||||
const isDev = (node, parent, scope) =>
|
||||
t.isIdentifier(node, dev) &&
|
||||
isGlobalOrFlowDeclared(scope.getBinding(dev.name)) &&
|
||||
!(t.isMemberExpression(parent));
|
||||
|
||||
function findProperty(objectExpression, key, fallback) {
|
||||
const property = objectExpression.properties.find(p => p.key.name === key);
|
||||
return property ? property.value : fallback();
|
||||
}
|
||||
|
||||
const inlinePlugin = {
|
||||
visitor: {
|
||||
Identifier(path, state) {
|
||||
if (isDev(path.node, path.parent, path.scope)) {
|
||||
path.replaceWith(t.booleanLiteral(state.opts.dev));
|
||||
}
|
||||
},
|
||||
MemberExpression(path, state) {
|
||||
const node = path.node;
|
||||
const scope = path.scope;
|
||||
const opts = state.opts;
|
||||
|
||||
if (
|
||||
isPlatformOS(node, scope, opts.isWrapped) ||
|
||||
isReactPlatformOS(node, scope, opts.isWrapped)
|
||||
) {
|
||||
path.replaceWith(t.stringLiteral(opts.platform));
|
||||
} else if (isProcessEnvNodeEnv(node, scope)) {
|
||||
path.replaceWith(
|
||||
t.stringLiteral(opts.dev ? 'development' : 'production'));
|
||||
}
|
||||
},
|
||||
CallExpression(path, state) {
|
||||
const node = path.node;
|
||||
const scope = path.scope;
|
||||
const arg = node.arguments[0];
|
||||
const opts = state.opts;
|
||||
|
||||
if (
|
||||
isPlatformSelect(node, scope, opts.isWrapped) ||
|
||||
isReactPlatformSelect(node, scope, opts.isWrapped)
|
||||
) {
|
||||
const fallback = () =>
|
||||
findProperty(arg, 'default', () => t.identifier('undefined'));
|
||||
const replacement = t.isObjectExpression(arg)
|
||||
? findProperty(arg, opts.platform, fallback)
|
||||
: node;
|
||||
|
||||
path.replaceWith(replacement);
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const plugin = () => inlinePlugin;
|
||||
|
||||
function checkRequireArgs(args, dependencyId) {
|
||||
const pattern = t.stringLiteral(dependencyId);
|
||||
return t.isStringLiteral(args[0], pattern) ||
|
||||
t.isMemberExpression(args[0]) &&
|
||||
t.isNumericLiteral(args[0].property) &&
|
||||
t.isStringLiteral(args[1], pattern);
|
||||
}
|
||||
|
||||
type AstResult = {
|
||||
ast: Ast,
|
||||
code: ?string,
|
||||
map: ?MappingsMap,
|
||||
};
|
||||
|
||||
function inline(
|
||||
filename: string,
|
||||
transformResult: {ast?: ?Ast, code: string, map: ?MappingsMap},
|
||||
options: {+dev: boolean, +platform: ?string},
|
||||
): AstResult {
|
||||
const code = transformResult.code;
|
||||
const babelOptions = {
|
||||
filename,
|
||||
plugins: [[plugin, options]],
|
||||
inputSourceMap: transformResult.map,
|
||||
sourceMaps: true,
|
||||
sourceFileName: filename,
|
||||
code: false,
|
||||
babelrc: false,
|
||||
compact: true,
|
||||
};
|
||||
|
||||
const result = transformResult.ast
|
||||
? babel.transformFromAst(transformResult.ast, code, babelOptions)
|
||||
: babel.transform(code, babelOptions);
|
||||
const {ast} = result;
|
||||
invariant(ast != null, 'Missing AST in babel transform results.');
|
||||
return {ast, code: result.code, map: result.map};
|
||||
}
|
||||
|
||||
inline.plugin = inlinePlugin;
|
||||
module.exports = inline;
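// Example rewrites (illustrative only) with state.opts = {dev: false, platform: 'ios'}:
//
//   __DEV__                                  -->  false
//   Platform.OS                              -->  'ios'
//   React.Platform.OS                        -->  'ios'
//   process.env.NODE_ENV                     -->  'production'
//   Platform.select({ios: x, android: y})    -->  x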
|
|
@ -1,34 +0,0 @@
|
|||
/**
 * Copyright (c) 2016-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @flow
 */

'use strict';

const uglify = require('uglify-js');

const {UGLIFY_JS_OUTPUT_OPTIONS} = require('./JsMinification');

import type {MappingsMap} from '../../lib/SourceMap';

function minify(filename: string, inputCode: string, sourceMap: ?MappingsMap) {
  const result = uglify.minify(inputCode, {
    fromString: true,
    inSourceMap: sourceMap,
    outSourceMap: true,
    output: UGLIFY_JS_OUTPUT_OPTIONS,
  });

  const code = result.code;
  const map = JSON.parse(result.map);
  map.sources = [filename];
  return {code, map};
}

module.exports = minify;
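// Usage sketch (not part of the original file); the filename and code are made up:
//
//   const {code, map} = minify('/js/add.js', 'function add(a, b) { return a + b; }', null);
//   // `map.sources` is overwritten with ['/js/add.js'] so the minified source map
//   // refers back to the pre-minification file.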
|
|
@ -1,33 +0,0 @@
|
|||
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @flow
 *
 */
'use strict';

export type ActionLogEntryData = {
  action_name: string,
};

export type ActionStartLogEntry = {
  action_name?: string,
  action_phase?: string,
  log_entry_label: string,
  log_session?: string,
  start_timestamp?: [number, number],
};

export type LogEntry = {
  action_name?: string,
  action_phase?: string,
  duration_ms?: number,
  log_entry_label: string,
  log_session?: string,
  start_timestamp?: [number, number],
};
|
|
@ -1,37 +0,0 @@
|
|||
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */
'use strict';

const mockColor = () => {
  return {
    bold: () => { return { }; },
  };
};

mockColor.bold = function() {
  return {};
};

mockColor.bgRed = function() {
  return {};
};

module.exports = {
  dim: s => s,
  magenta: mockColor,
  white: mockColor,
  blue: mockColor,
  yellow: mockColor,
  green: mockColor,
  bold: mockColor,
  red: mockColor,
  cyan: mockColor,
  gray: mockColor,
  black: mockColor,
};
|
|
@ -1,66 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* eslint-disable no-console-disallow
|
||||
*
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest.disableAutomock();
|
||||
|
||||
const {
|
||||
createEntry,
|
||||
createActionStartEntry,
|
||||
createActionEndEntry,
|
||||
} = require('../');
|
||||
|
||||
describe('Logger', () => {
|
||||
const originalConsoleLog = console.log;
|
||||
|
||||
beforeEach(() => {
|
||||
console.log = jest.fn();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
console.log = originalConsoleLog;
|
||||
});
|
||||
|
||||
it('creates simple log entries', () => {
|
||||
const logEntry = createEntry('Test');
|
||||
expect(logEntry).toEqual({
|
||||
log_entry_label: 'Test',
|
||||
log_session: jasmine.any(String),
|
||||
packager_version: jasmine.any(String),
|
||||
});
|
||||
});
|
||||
|
||||
it('creates action start log entries', () => {
|
||||
const actionStartLogEntry = createActionStartEntry('Test');
|
||||
expect(actionStartLogEntry).toEqual({
|
||||
action_name: 'Test',
|
||||
action_phase: 'start',
|
||||
log_entry_label: 'Test',
|
||||
log_session: jasmine.any(String),
|
||||
packager_version: jasmine.any(String),
|
||||
start_timestamp: jasmine.any(Object),
|
||||
});
|
||||
});
|
||||
|
||||
it('creates action end log entries', () => {
|
||||
const actionEndLogEntry = createActionEndEntry(createActionStartEntry('Test'));
|
||||
expect(actionEndLogEntry).toEqual({
|
||||
action_name: 'Test',
|
||||
action_phase: 'end',
|
||||
duration_ms: jasmine.any(Number),
|
||||
log_entry_label: 'Test',
|
||||
log_session: jasmine.any(String),
|
||||
packager_version: jasmine.any(String),
|
||||
start_timestamp: jasmine.any(Object),
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,90 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const os = require('os');
|
||||
|
||||
const {EventEmitter} = require('events');
|
||||
|
||||
const VERSION = require('../../package.json').version;
|
||||
|
||||
import type {
|
||||
ActionLogEntryData,
|
||||
ActionStartLogEntry,
|
||||
LogEntry,
|
||||
} from './Types';
|
||||
|
||||
const log_session = `${os.hostname()}-${Date.now()}`;
|
||||
const eventEmitter = new EventEmitter();
|
||||
|
||||
function on(event: string, handler: (logEntry: LogEntry) => void): void {
|
||||
eventEmitter.on(event, handler);
|
||||
}
|
||||
|
||||
function createEntry(data: LogEntry | string): LogEntry {
|
||||
const logEntry = typeof data === 'string' ? {log_entry_label: data} : data;
|
||||
|
||||
return {
|
||||
...logEntry,
|
||||
log_session,
|
||||
packager_version: VERSION,
|
||||
};
|
||||
}
|
||||
|
||||
function createActionStartEntry(data: ActionLogEntryData | string): LogEntry {
|
||||
const logEntry = typeof data === 'string' ? {action_name: data} : data;
|
||||
const {action_name} = logEntry;
|
||||
|
||||
return createEntry({
|
||||
...logEntry,
|
||||
action_name,
|
||||
action_phase: 'start',
|
||||
log_entry_label: action_name,
|
||||
start_timestamp: process.hrtime(),
|
||||
});
|
||||
}
|
||||
|
||||
function createActionEndEntry(logEntry: ActionStartLogEntry): LogEntry {
|
||||
const {
|
||||
action_name,
|
||||
action_phase,
|
||||
start_timestamp,
|
||||
} = logEntry;
|
||||
|
||||
if (action_phase !== 'start' || !Array.isArray(start_timestamp)) {
|
||||
throw new Error('Action has not started or has already ended');
|
||||
}
|
||||
|
||||
const timeDelta = process.hrtime(start_timestamp);
|
||||
const duration_ms = Math.round((timeDelta[0] * 1e9 + timeDelta[1]) / 1e6);
|
||||
|
||||
return createEntry({
|
||||
...logEntry,
|
||||
action_name,
|
||||
action_phase: 'end',
|
||||
duration_ms,
|
||||
log_entry_label: action_name,
|
||||
});
|
||||
}
|
||||
|
||||
function log(logEntry: LogEntry): LogEntry {
|
||||
eventEmitter.emit('log', logEntry);
|
||||
return logEntry;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
on,
|
||||
createEntry,
|
||||
createActionStartEntry,
|
||||
createActionEndEntry,
|
||||
log,
|
||||
};
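// Typical pairing, as exercised by the tests above (sketch only): create and log
// a start entry, do the work, then log the derived end entry, which adds
// `duration_ms`.
//
//   const startEntry = log(createActionStartEntry('Transforming file'));
//   // ... perform the action ...
//   log(createActionEndEntry(startEntry));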
|
|
@ -1,170 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const emptyFunction = require('fbjs/lib/emptyFunction');
|
||||
const invariant = require('fbjs/lib/invariant');
|
||||
const memoize = require('async/memoize');
|
||||
const emptyModule = require('./module').empty;
|
||||
const nullthrows = require('fbjs/lib/nullthrows');
|
||||
const queue = require('async/queue');
|
||||
const seq = require('async/seq');
|
||||
|
||||
import type {
|
||||
Callback,
|
||||
File,
|
||||
GraphFn,
|
||||
LoadFn,
|
||||
ResolveFn,
|
||||
} from './types.flow';
|
||||
|
||||
type Async$Queue<T, C> = {
|
||||
buffer: number,
|
||||
concurrency: number,
|
||||
drain: () => mixed,
|
||||
empty: () => mixed,
|
||||
error: (Error, T) => mixed,
|
||||
idle(): boolean,
|
||||
kill(): void,
|
||||
length(): number,
|
||||
pause(): void,
|
||||
paused: boolean,
|
||||
push(T | Array<T>, void | C): void,
|
||||
resume(): void,
|
||||
running(): number,
|
||||
saturated: () => mixed,
|
||||
started: boolean,
|
||||
unsaturated: () => mixed,
|
||||
unshift(T, void | C): void,
|
||||
workersList(): Array<T>,
|
||||
};
|
||||
|
||||
type LoadQueue =
|
||||
Async$Queue<{id: string, parent: ?string}, Callback<File, Array<string>>>;
|
||||
|
||||
const NO_OPTIONS = {};
|
||||
|
||||
exports.create = function create(resolve: ResolveFn, load: LoadFn): GraphFn {
|
||||
function Graph(entryPoints, platform, options, callback = emptyFunction) {
|
||||
const {
|
||||
log = (console: any),
|
||||
optimize = false,
|
||||
skip,
|
||||
} = options || NO_OPTIONS;
|
||||
|
||||
if (typeof platform !== 'string') {
|
||||
log.error('`Graph`, called without a platform');
|
||||
callback(Error('The target platform has to be passed'));
|
||||
return;
|
||||
}
|
||||
|
||||
const loadQueue: LoadQueue = queue(seq(
|
||||
({id, parent}, cb) => resolve(id, parent, platform, options || NO_OPTIONS, cb),
|
||||
memoize((file, cb) => load(file, {log, optimize}, cb)),
|
||||
), Number.MAX_SAFE_INTEGER);
|
||||
|
||||
const {collect, loadModule} = createGraphHelpers(loadQueue, skip);
|
||||
|
||||
loadQueue.drain = () => {
|
||||
loadQueue.kill();
|
||||
callback(null, collect());
|
||||
};
|
||||
loadQueue.error = error => {
|
||||
loadQueue.error = emptyFunction;
|
||||
loadQueue.kill();
|
||||
callback(error);
|
||||
};
|
||||
|
||||
let i = 0;
|
||||
for (const entryPoint of entryPoints) {
|
||||
loadModule(entryPoint, null, i++);
|
||||
}
|
||||
|
||||
if (i === 0) {
|
||||
log.error('`Graph` called without any entry points');
|
||||
loadQueue.kill();
|
||||
callback(Error('At least one entry point has to be passed.'));
|
||||
}
|
||||
}
|
||||
|
||||
return Graph;
|
||||
};
|
||||
|
||||
function createGraphHelpers(loadQueue, skip) {
|
||||
const modules = new Map([[null, emptyModule()]]);
|
||||
|
||||
function collect(
|
||||
path = null,
|
||||
serialized = {entryModules: [], modules: []},
|
||||
seen = new Set(),
|
||||
) {
|
||||
const module = modules.get(path);
|
||||
if (module == null || seen.has(path)) {
|
||||
return serialized;
|
||||
}
|
||||
|
||||
const {dependencies} = module;
|
||||
if (path === null) {
|
||||
serialized.entryModules =
|
||||
dependencies.map(dep => nullthrows(modules.get(dep.path)));
|
||||
} else {
|
||||
serialized.modules.push(module);
|
||||
seen.add(path);
|
||||
}
|
||||
|
||||
for (const dependency of dependencies) {
|
||||
collect(dependency.path, serialized, seen);
|
||||
}
|
||||
|
||||
return serialized;
|
||||
}
|
||||
|
||||
function loadModule(id, parent, parentDepIndex) {
|
||||
loadQueue.push(
|
||||
{id, parent},
|
||||
(error, file, dependencyIDs) =>
|
||||
onFileLoaded(error, file, dependencyIDs, id, parent, parentDepIndex),
|
||||
);
|
||||
}
|
||||
|
||||
function onFileLoaded(
|
||||
error,
|
||||
file,
|
||||
dependencyIDs,
|
||||
id,
|
||||
parent,
|
||||
parentDependencyIndex,
|
||||
) {
|
||||
if (error) {
|
||||
return;
|
||||
}
|
||||
|
||||
const {path} = nullthrows(file);
|
||||
dependencyIDs = nullthrows(dependencyIDs);
|
||||
|
||||
const parentModule = modules.get(parent);
|
||||
invariant(parentModule, 'Invalid parent module: ' + String(parent));
|
||||
parentModule.dependencies[parentDependencyIndex] = {id, path};
|
||||
|
||||
if ((!skip || !skip.has(path)) && !modules.has(path)) {
|
||||
const module = {
|
||||
dependencies: Array(dependencyIDs.length),
|
||||
file: nullthrows(file),
|
||||
};
|
||||
modules.set(path, module);
|
||||
for (let i = 0; i < dependencyIDs.length; ++i) {
|
||||
loadModule(dependencyIDs[i], path, i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {collect, loadModule};
|
||||
}
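// Rough usage sketch (not from the original file): `create` wires a resolver and
// a loader into a callable graph builder.
//
//   const buildGraph = Graph.create(resolve, load);
//   buildGraph(['/app/index.js'], 'ios', {optimize: true}, (error, result) => {
//     // result.entryModules: the modules for the entry points
//     // result.modules: all reachable modules, in depth-first order
//   });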
|
|
@ -1,113 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const defaults = require('../defaults');
|
||||
const nullthrows = require('fbjs/lib/nullthrows');
|
||||
const parallel = require('async/parallel');
|
||||
const seq = require('async/seq');
|
||||
const virtualModule = require('./module').virtual;
|
||||
|
||||
import type {
|
||||
BuildResult,
|
||||
Callback,
|
||||
GraphFn,
|
||||
GraphResult,
|
||||
Module,
|
||||
PostProcessModules,
|
||||
} from './types.flow';
|
||||
|
||||
type BuildFn = (
|
||||
entryPoints: Iterable<string>,
|
||||
options: BuildOptions,
|
||||
callback: Callback<BuildResult>,
|
||||
) => void;
|
||||
|
||||
type BuildOptions = {|
|
||||
optimize: boolean,
|
||||
platform: string,
|
||||
|};
|
||||
|
||||
exports.createBuildSetup = (
|
||||
graph: GraphFn,
|
||||
postProcessModules: PostProcessModules,
|
||||
translateDefaultsPath: string => string = x => x,
|
||||
): BuildFn =>
|
||||
(entryPoints, options, callback) => {
|
||||
const {
|
||||
optimize = false,
|
||||
platform = defaults.platforms[0],
|
||||
} = options;
|
||||
const graphOptions = {optimize};
|
||||
|
||||
const graphWithOptions =
|
||||
(entry, cb) => graph(entry, platform, graphOptions, cb);
|
||||
const graphOnlyModules = seq(graphWithOptions, getModules);
|
||||
|
||||
parallel({
|
||||
graph: cb => graphWithOptions(entryPoints, (error, result) => {
|
||||
if (error) {
|
||||
cb(error);
|
||||
return;
|
||||
}
|
||||
/* $FlowFixMe: not undefined if there is no error */
|
||||
const {modules, entryModules} = result;
|
||||
const prModules = postProcessModules(modules, [...entryPoints]);
|
||||
cb(null, {modules: prModules, entryModules});
|
||||
}),
|
||||
moduleSystem: cb => graphOnlyModules(
|
||||
[translateDefaultsPath(defaults.moduleSystem)],
|
||||
cb,
|
||||
),
|
||||
polyfills: cb => graphOnlyModules(
|
||||
defaults.polyfills.map(translateDefaultsPath),
|
||||
cb,
|
||||
),
|
||||
}, (
|
||||
error: ?Error,
|
||||
result?: {graph: GraphResult, moduleSystem: Array<Module>, polyfills: Array<Module>},
|
||||
) => {
|
||||
if (error) {
|
||||
callback(error);
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
const {
|
||||
graph: {modules, entryModules},
|
||||
moduleSystem,
|
||||
polyfills,
|
||||
} = nullthrows(result);
|
||||
|
||||
const preludeScript = prelude(optimize);
|
||||
const prependedScripts = [preludeScript, ...moduleSystem, ...polyfills];
|
||||
callback(null, {
|
||||
entryModules,
|
||||
modules: concat(prependedScripts, modules),
|
||||
prependedScripts,
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
const getModules = (x, cb) => cb(null, x.modules);
|
||||
|
||||
function* concat<T>(...iterables: Array<Iterable<T>>): Iterable<T> {
|
||||
for (const it of iterables) {
|
||||
yield* it;
|
||||
}
|
||||
}
|
||||
|
||||
function prelude(optimize) {
|
||||
return virtualModule(
|
||||
`var __DEV__=${String(!optimize)},` +
|
||||
'__BUNDLE_START_TIME__=this.nativePerformanceNow?nativePerformanceNow():Date.now();'
|
||||
);
|
||||
}
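// Hedged usage sketch: the returned build function takes entry points plus
// {optimize, platform} and yields the prepended scripts followed by the
// bundle's modules.
//
//   const build = createBuildSetup(graph, postProcessModules);
//   build(['/app/index.js'], {optimize: false, platform: 'android'}, (error, result) => {
//     // result.prependedScripts: [prelude, module system, ...polyfills]
//     // result.modules: prepended scripts followed by the post-processed modules
//   });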
|
|
@ -1,378 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest
|
||||
.disableAutomock()
|
||||
.useRealTimers()
|
||||
.mock('console');
|
||||
|
||||
const {Console} = require('console');
|
||||
const Graph = require('../Graph');
|
||||
const {fn} = require('../test-helpers');
|
||||
|
||||
const {any, objectContaining} = jasmine;
|
||||
const quiet = new Console();
|
||||
|
||||
describe('Graph:', () => {
|
||||
const anyEntry = ['arbitrary/entry/point'];
|
||||
const anyPlatform = 'arbitrary platform';
|
||||
const noOpts = undefined;
|
||||
|
||||
let graph, load, resolve;
|
||||
beforeEach(() => {
|
||||
load = fn();
|
||||
resolve = fn();
|
||||
resolve.stub.yields(null, 'arbitrary file');
|
||||
load.stub.yields(null, createFile('arbitrary file'), []);
|
||||
|
||||
graph = Graph.create(resolve, load);
|
||||
});
|
||||
|
||||
it('calls back an error when called without any entry point', done => {
|
||||
graph([], anyPlatform, {log: quiet}, error => {
|
||||
expect(error).toEqual(any(Error));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('resolves the entry point with the passed-in `resolve` function', done => {
|
||||
const entryPoint = '/arbitrary/path';
|
||||
graph([entryPoint], anyPlatform, noOpts, () => {
|
||||
expect(resolve).toBeCalledWith(
|
||||
entryPoint, null, any(String), any(Object), any(Function));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('allows specifying multiple entry points', done => {
|
||||
const entryPoints = ['Arbitrary', '../entry.js'];
|
||||
graph(entryPoints, anyPlatform, noOpts, () => {
|
||||
expect(resolve).toBeCalledWith(
|
||||
entryPoints[0], null, any(String), any(Object), any(Function));
|
||||
expect(resolve).toBeCalledWith(
|
||||
entryPoints[1], null, any(String), any(Object), any(Function));
|
||||
done();
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
it('calls back with an error when called without `platform` option', done => {
|
||||
graph(anyEntry, undefined, {log: quiet}, error => {
|
||||
expect(error).toEqual(any(Error));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('forwards a passed-in `platform` to `resolve`', done => {
|
||||
const platform = 'any';
|
||||
graph(anyEntry, platform, noOpts, () => {
|
||||
expect(resolve).toBeCalledWith(
|
||||
any(String), null, platform, any(Object), any(Function));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('forwards a passed-in `log` option to `resolve`', done => {
|
||||
const log = new Console();
|
||||
graph(anyEntry, anyPlatform, {log}, () => {
|
||||
expect(resolve).toBeCalledWith(
|
||||
any(String), null, any(String), objectContaining({log}), any(Function));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('calls back with every error produced by `resolve`', done => {
|
||||
const error = Error();
|
||||
resolve.stub.yields(error);
|
||||
graph(anyEntry, anyPlatform, noOpts, e => {
|
||||
expect(e).toBe(error);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('only calls back once if two parallel invocations of `resolve` fail', done => {
|
||||
load.stub.yields(null, createFile('with two deps'), ['depA', 'depB']);
|
||||
resolve.stub
|
||||
.withArgs('depA').yieldsAsync(new Error())
|
||||
.withArgs('depB').yieldsAsync(new Error());
|
||||
|
||||
let calls = 0;
|
||||
function callback() {
|
||||
if (calls === 0) {
|
||||
process.nextTick(() => {
|
||||
expect(calls).toEqual(1);
|
||||
done();
|
||||
});
|
||||
}
|
||||
++calls;
|
||||
}
|
||||
|
||||
graph(['entryA', 'entryB'], anyPlatform, noOpts, callback);
|
||||
});
|
||||
|
||||
it('passes the files returned by `resolve` on to the `load` function', done => {
|
||||
const modules = new Map([
|
||||
['Arbitrary', '/absolute/path/to/Arbitrary.js'],
|
||||
['../entry.js', '/wherever/is/entry.js'],
|
||||
]);
|
||||
for (const [id, file] of modules) {
|
||||
resolve.stub.withArgs(id).yields(null, file);
|
||||
}
|
||||
const [file1, file2] = modules.values();
|
||||
|
||||
graph(modules.keys(), anyPlatform, noOpts, () => {
|
||||
expect(load).toBeCalledWith(file1, any(Object), any(Function));
|
||||
expect(load).toBeCalledWith(file2, any(Object), any(Function));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('passes the `optimize` flag on to `load`', done => {
|
||||
graph(anyEntry, anyPlatform, {optimize: true}, () => {
|
||||
expect(load).toBeCalledWith(
|
||||
any(String), objectContaining({optimize: true}), any(Function));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('uses `false` as the default for the `optimize` flag', done => {
|
||||
graph(anyEntry, anyPlatform, noOpts, () => {
|
||||
expect(load).toBeCalledWith(
|
||||
any(String), objectContaining({optimize: false}), any(Function));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('forwards a passed-in `log` to `load`', done => {
|
||||
const log = new Console();
|
||||
graph(anyEntry, anyPlatform, {log}, () => {
|
||||
expect(load)
|
||||
.toBeCalledWith(any(String), objectContaining({log}), any(Function));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('calls back with every error produced by `load`', done => {
|
||||
const error = Error();
|
||||
load.stub.yields(error);
|
||||
graph(anyEntry, anyPlatform, noOpts, e => {
|
||||
expect(e).toBe(error);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('resolves any dependencies provided by `load`', done => {
|
||||
const entryPath = '/path/to/entry.js';
|
||||
const id1 = 'required/id';
|
||||
const id2 = './relative/import';
|
||||
resolve.stub.withArgs('entry').yields(null, entryPath);
|
||||
load.stub.withArgs(entryPath)
|
||||
.yields(null, {path: entryPath}, [id1, id2]);
|
||||
|
||||
graph(['entry'], anyPlatform, noOpts, () => {
|
||||
expect(resolve).toBeCalledWith(
|
||||
id1, entryPath, any(String), any(Object), any(Function));
|
||||
expect(resolve).toBeCalledWith(
|
||||
id2, entryPath, any(String), any(Object), any(Function));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('loads transitive dependencies', done => {
|
||||
const entryPath = '/path/to/entry.js';
|
||||
const id1 = 'required/id';
|
||||
const id2 = './relative/import';
|
||||
const path1 = '/path/to/dep/1';
|
||||
const path2 = '/path/to/dep/2';
|
||||
|
||||
resolve.stub
|
||||
.withArgs(id1).yields(null, path1)
|
||||
.withArgs(id2).yields(null, path2)
|
||||
.withArgs('entry').yields(null, entryPath);
|
||||
load.stub
|
||||
.withArgs(entryPath).yields(null, {path: entryPath}, [id1])
|
||||
.withArgs(path1).yields(null, {path: path1}, [id2]);
|
||||
|
||||
graph(['entry'], anyPlatform, noOpts, () => {
|
||||
expect(resolve).toBeCalledWith(id2, path1, any(String), any(Object), any(Function));
|
||||
expect(load).toBeCalledWith(path1, any(Object), any(Function));
|
||||
expect(load).toBeCalledWith(path2, any(Object), any(Function));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('resolves modules in depth-first traversal order, regardless of the order of resolution',
|
||||
done => {
|
||||
load.stub.reset();
|
||||
resolve.stub.reset();
|
||||
|
||||
const ids = [
|
||||
'a',
|
||||
'b',
|
||||
'c', 'd',
|
||||
'e',
|
||||
'f', 'g',
|
||||
'h',
|
||||
];
|
||||
ids.forEach(id => {
|
||||
const path = idToPath(id);
|
||||
resolve.stub.withArgs(id).yields(null, path);
|
||||
load.stub.withArgs(path).yields(null, createFile(id), []);
|
||||
});
|
||||
load.stub.withArgs(idToPath('a')).yields(null, createFile('a'), ['b', 'e', 'h']);
|
||||
load.stub.withArgs(idToPath('b')).yields(null, createFile('b'), ['c', 'd']);
|
||||
load.stub.withArgs(idToPath('e')).yields(null, createFile('e'), ['f', 'g']);
|
||||
|
||||
// load certain ids later
|
||||
['b', 'e', 'h'].forEach(id => resolve.stub.withArgs(id).resetBehavior());
|
||||
resolve.stub.withArgs('h').func = (a, b, c, d, callback) => {
|
||||
callback(null, idToPath('h'));
|
||||
['e', 'b'].forEach(
|
||||
id => resolve.stub.withArgs(id).yield(null, idToPath(id)));
|
||||
};
|
||||
|
||||
graph(['a'], anyPlatform, noOpts, (error, result) => {
|
||||
expect(error).toEqual(null);
|
||||
expect(result.modules).toEqual([
|
||||
createModule('a', ['b', 'e', 'h']),
|
||||
createModule('b', ['c', 'd']),
|
||||
createModule('c'),
|
||||
createModule('d'),
|
||||
createModule('e', ['f', 'g']),
|
||||
createModule('f'),
|
||||
createModule('g'),
|
||||
createModule('h'),
|
||||
]);
|
||||
done();
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
it('calls back with the resolved modules of the entry points', done => {
|
||||
load.stub.reset();
|
||||
resolve.stub.reset();
|
||||
|
||||
load.stub.withArgs(idToPath('a')).yields(null, createFile('a'), ['b']);
|
||||
load.stub.withArgs(idToPath('b')).yields(null, createFile('b'), []);
|
||||
load.stub.withArgs(idToPath('c')).yields(null, createFile('c'), ['d']);
|
||||
load.stub.withArgs(idToPath('d')).yields(null, createFile('d'), []);
|
||||
|
||||
'abcd'.split('')
|
||||
.forEach(id => resolve.stub.withArgs(id).yields(null, idToPath(id)));
|
||||
|
||||
graph(['a', 'c'], anyPlatform, noOpts, (error, result) => {
|
||||
expect(result.entryModules).toEqual([
|
||||
createModule('a', ['b']),
|
||||
createModule('c', ['d']),
|
||||
]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('resolves modules for all entry points correctly if one is a dependency of another', done => {
|
||||
load.stub.reset();
|
||||
resolve.stub.reset();
|
||||
|
||||
load.stub.withArgs(idToPath('a')).yields(null, createFile('a'), ['b']);
|
||||
load.stub.withArgs(idToPath('b')).yields(null, createFile('b'), []);
|
||||
|
||||
'ab'.split('')
|
||||
.forEach(id => resolve.stub.withArgs(id).yields(null, idToPath(id)));
|
||||
|
||||
graph(['a', 'b'], anyPlatform, noOpts, (error, result) => {
|
||||
expect(result.entryModules).toEqual([
|
||||
createModule('a', ['b']),
|
||||
createModule('b', []),
|
||||
]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('does not include dependencies more than once', done => {
|
||||
const ids = ['a', 'b', 'c', 'd'];
|
||||
ids.forEach(id => {
|
||||
const path = idToPath(id);
|
||||
resolve.stub.withArgs(id).yields(null, path);
|
||||
load.stub.withArgs(path).yields(null, createFile(id), []);
|
||||
});
|
||||
['a', 'd'].forEach(id =>
|
||||
load.stub
|
||||
.withArgs(idToPath(id)).yields(null, createFile(id), ['b', 'c']));
|
||||
|
||||
graph(['a', 'd', 'b'], anyPlatform, noOpts, (error, result) => {
|
||||
expect(error).toEqual(null);
|
||||
expect(result.modules).toEqual([
|
||||
createModule('a', ['b', 'c']),
|
||||
createModule('b'),
|
||||
createModule('c'),
|
||||
createModule('d', ['b', 'c']),
|
||||
]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('handles dependency cycles', done => {
|
||||
resolve.stub
|
||||
.withArgs('a').yields(null, idToPath('a'))
|
||||
.withArgs('b').yields(null, idToPath('b'))
|
||||
.withArgs('c').yields(null, idToPath('c'));
|
||||
load.stub
|
||||
.withArgs(idToPath('a')).yields(null, createFile('a'), ['b'])
|
||||
.withArgs(idToPath('b')).yields(null, createFile('b'), ['c'])
|
||||
.withArgs(idToPath('c')).yields(null, createFile('c'), ['a']);
|
||||
|
||||
graph(['a'], anyPlatform, noOpts, (error, result) => {
|
||||
expect(result.modules).toEqual([
|
||||
createModule('a', ['b']),
|
||||
createModule('b', ['c']),
|
||||
createModule('c', ['a']),
|
||||
]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('can skip files', done => {
|
||||
['a', 'b', 'c', 'd', 'e'].forEach(
|
||||
id => resolve.stub.withArgs(id).yields(null, idToPath(id)));
|
||||
load.stub
|
||||
.withArgs(idToPath('a')).yields(null, createFile('a'), ['b', 'c', 'd'])
|
||||
.withArgs(idToPath('b')).yields(null, createFile('b'), ['e']);
|
||||
['c', 'd', 'e'].forEach(id =>
|
||||
load.stub.withArgs(idToPath(id)).yields(null, createFile(id), []));
|
||||
const skip = new Set([idToPath('b'), idToPath('c')]);
|
||||
|
||||
graph(['a'], anyPlatform, {skip}, (error, result) => {
|
||||
expect(result.modules).toEqual([
|
||||
createModule('a', ['b', 'c', 'd']),
|
||||
createModule('d', []),
|
||||
]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
function createDependency(id) {
|
||||
return {id, path: idToPath(id)};
|
||||
}
|
||||
|
||||
function createFile(id) {
|
||||
return {ast: {}, path: idToPath(id)};
|
||||
}
|
||||
|
||||
function createModule(id, dependencies = []): Module {
|
||||
return {
|
||||
file: createFile(id),
|
||||
dependencies: dependencies.map(createDependency),
|
||||
};
|
||||
}
|
||||
|
||||
function idToPath(id) {
|
||||
return '/path/to/' + id;
|
||||
}
|
|
@ -1,109 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2017-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest.disableAutomock();
|
||||
|
||||
const ModuleGraph = require('../ModuleGraph');
|
||||
const defaults = require('../../defaults');
|
||||
|
||||
const FILE_TYPE = 'module';
|
||||
|
||||
describe('build setup', () => {
|
||||
const buildSetup = ModuleGraph.createBuildSetup(graph, mds => {
|
||||
return [...mds].sort((l, r) => l.file.path > r.file.path);
|
||||
});
|
||||
const noOptions = {};
|
||||
const noEntryPoints = [];
|
||||
|
||||
it('adds a prelude containing start time and `__DEV__` to the build', done => {
|
||||
buildSetup(noEntryPoints, noOptions, (error, result) => {
|
||||
expect(error).toEqual(null);
|
||||
|
||||
const [prelude] = result.modules;
|
||||
expect(prelude).toEqual({
|
||||
dependencies: [],
|
||||
file: {
|
||||
code: 'var __DEV__=true,__BUNDLE_START_TIME__=' +
|
||||
'this.nativePerformanceNow?nativePerformanceNow():Date.now();',
|
||||
map: null,
|
||||
path: '',
|
||||
type: 'script',
|
||||
},
|
||||
});
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('sets `__DEV__` to false in the prelude if optimization is enabled', done => {
|
||||
buildSetup(noEntryPoints, {optimize: true}, (error, result) => {
|
||||
const [prelude] = result.modules;
|
||||
expect(prelude.file.code)
|
||||
.toEqual('var __DEV__=false,__BUNDLE_START_TIME__=' +
|
||||
'this.nativePerformanceNow?nativePerformanceNow():Date.now();');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('places the module system implementation directly after the prelude', done => {
|
||||
buildSetup(noEntryPoints, noOptions, (error, result) => {
|
||||
const [, moduleSystem] = result.modules;
|
||||
expect(moduleSystem).toEqual({
|
||||
dependencies: [],
|
||||
file: {
|
||||
code: '',
|
||||
path: defaults.moduleSystem,
|
||||
type: FILE_TYPE,
|
||||
},
|
||||
});
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('places polyfills after the module system', done => {
|
||||
buildSetup(noEntryPoints, noOptions, (error, result) => {
|
||||
const polyfills =
|
||||
Array.from(result.modules).slice(2, 2 + defaults.polyfills.length);
|
||||
expect(polyfills).toEqual(defaults.polyfills.map(moduleFromPath));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('places all entry points and dependencies at the end, post-processed', done => {
|
||||
const entryPoints = ['b', 'c', 'd'];
|
||||
buildSetup(entryPoints, noOptions, (error, result) => {
|
||||
expect(Array.from(result.modules).slice(-4))
|
||||
.toEqual(['a', 'b', 'c', 'd'].map(moduleFromPath));
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
function moduleFromPath(path) {
|
||||
return {
|
||||
dependencies: path === 'b' ? ['a'] : [],
|
||||
file: {
|
||||
code: '',
|
||||
path,
|
||||
type: FILE_TYPE,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function graph(entryPoints, platform, options, callback) {
|
||||
const modules = Array.from(entryPoints, moduleFromPath);
|
||||
const depModules = Array.prototype.concat.apply(
|
||||
[],
|
||||
modules.map(x => x.dependencies.map(moduleFromPath)),
|
||||
);
|
||||
callback(null, {
|
||||
entryModules: modules,
|
||||
modules: modules.concat(depModules),
|
||||
});
|
||||
}
|
|
@ -1,28 +0,0 @@
|
|||
/**
 * Copyright (c) 2016-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @flow
 */
'use strict';


import type {Module} from './types.flow';

exports.empty = (): Module => virtual('');

// creates a virtual module (i.e. not corresponding to a file on disk)
// with the given source code.
const virtual = exports.virtual = (code: string): Module => ({
  dependencies: [],
  file: {
    code,
    map: null,
    path: '',
    type: 'script',
  },
});
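// Usage sketch (illustrative): this is roughly how ModuleGraph's `prelude()`
// builds the script it prepends to every bundle:
//
//   const preludeModule = virtual('var __DEV__=true,__BUNDLE_START_TIME__=Date.now();');
//   // preludeModule.file.type === 'script'; it has no path and no dependencies.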
|
|
@ -1,90 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const {dirname, join, parse} = require('path');
|
||||
|
||||
module.exports = class HasteFS {
|
||||
directories: Set<string>;
|
||||
directoryEntries: Map<string, Array<string>>;
|
||||
files: Set<string>;
|
||||
|
||||
constructor(files: Array<string>) {
|
||||
this.directories = buildDirectorySet(files);
|
||||
this.directoryEntries = buildDirectoryEntries(files.map(parse));
|
||||
this.files = new Set(files);
|
||||
}
|
||||
|
||||
closest(path: string, fileName: string): ?string {
|
||||
const parsedPath = parse(path);
|
||||
const root = parsedPath.root;
|
||||
let dir = parsedPath.dir;
|
||||
do {
|
||||
const candidate = join(dir, fileName);
|
||||
if (this.files.has(candidate)) {
|
||||
return candidate;
|
||||
}
|
||||
dir = dirname(dir);
|
||||
} while (dir !== '.' && dir !== root);
|
||||
return null;
|
||||
}
|
||||
|
||||
dirExists(path: string) {
|
||||
return this.directories.has(path);
|
||||
}
|
||||
|
||||
exists(path: string) {
|
||||
return this.files.has(path);
|
||||
}
|
||||
|
||||
getAllFiles() {
|
||||
return Array.from(this.files.keys());
|
||||
}
|
||||
|
||||
matchFiles() {
|
||||
throw new Error(
|
||||
'HasteFS.matchFiles is not implemented yet.'
|
||||
);
|
||||
}
|
||||
|
||||
matches(directory: string, pattern: RegExp) {
|
||||
const entries = this.directoryEntries.get(directory);
|
||||
return entries ? entries.filter(pattern.test, pattern) : [];
|
||||
}
|
||||
};
|
||||
|
||||
function buildDirectorySet(files) {
|
||||
const directories = new Set();
|
||||
files.forEach(path => {
|
||||
const parsedPath = parse(path);
|
||||
const root = parsedPath.root;
|
||||
let dir = parsedPath.dir;
|
||||
while (dir !== '.' && dir !== root && !directories.has(dir)) {
|
||||
directories.add(dir);
|
||||
dir = dirname(dir);
|
||||
}
|
||||
});
|
||||
return directories;
|
||||
}
|
||||
|
||||
function buildDirectoryEntries(files) {
|
||||
const directoryEntries = new Map();
|
||||
files.forEach(({base, dir}) => {
|
||||
const entries = directoryEntries.get(dir);
|
||||
if (entries) {
|
||||
entries.push(base);
|
||||
} else {
|
||||
directoryEntries.set(dir, [base]);
|
||||
}
|
||||
});
|
||||
return directoryEntries;
|
||||
}
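// Sketch of typical `closest` usage (the file list is made up):
//
//   const hasteFS = new HasteFS(['/repo/package.json', '/repo/src/App.js']);
//   hasteFS.closest('/repo/src/App.js', 'package.json');
//   // --> '/repo/package.json' (walks parent directories up to the root)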
|
|
@ -1,64 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import type {CachedReadResult, ReadResult} from '../../node-haste/Module';
|
||||
import type {TransformedCodeFile} from '../types.flow';
|
||||
import type ModuleCache from './ModuleCache';
|
||||
|
||||
module.exports = class Module {
|
||||
hasteID: ?string;
|
||||
moduleCache: ModuleCache;
|
||||
name: Promise<string>;
|
||||
path: string;
|
||||
type: 'Module';
|
||||
|
||||
constructor(
|
||||
path: string,
|
||||
moduleCache: ModuleCache,
|
||||
info: TransformedCodeFile,
|
||||
) {
|
||||
this.hasteID = info.hasteID;
|
||||
this.moduleCache = moduleCache;
|
||||
this.name = Promise.resolve(this.hasteID || getName(path));
|
||||
this.path = path;
|
||||
this.type = 'Module';
|
||||
}
|
||||
|
||||
readCached(): CachedReadResult {
|
||||
throw new Error('not implemented');
|
||||
}
|
||||
|
||||
readFresh(): Promise<ReadResult> {
|
||||
return Promise.reject(new Error('not implemented'));
|
||||
}
|
||||
|
||||
getName() {
|
||||
return this.name;
|
||||
}
|
||||
|
||||
getPackage() {
|
||||
return this.moduleCache.getPackageOf(this.path);
|
||||
}
|
||||
|
||||
isHaste() {
|
||||
return Boolean(this.hasteID);
|
||||
}
|
||||
|
||||
hash() {
|
||||
throw new Error('not implemented');
|
||||
}
|
||||
};
|
||||
|
||||
function getName(path) {
|
||||
return path.replace(/^.*[\/\\]node_modules[\///]/, '');
|
||||
}
|
|
@ -1,71 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const Module = require('./Module');
|
||||
const Package = require('./Package');
|
||||
|
||||
import type {PackageData, TransformedCodeFile} from '../types.flow';
|
||||
|
||||
type GetClosestPackageFn = (filePath: string) => ?string;
|
||||
|
||||
module.exports = class ModuleCache {
|
||||
_getClosestPackage: GetClosestPackageFn;
|
||||
getTransformedFile: string => TransformedCodeFile;
|
||||
modules: Map<string, Module>;
|
||||
packages: Map<string, Package>;
|
||||
|
||||
constructor(
|
||||
getClosestPackage: GetClosestPackageFn,
|
||||
getTransformedFile: string => TransformedCodeFile,
|
||||
) {
|
||||
this._getClosestPackage = getClosestPackage;
|
||||
this.getTransformedFile = getTransformedFile;
|
||||
this.modules = new Map();
|
||||
this.packages = new Map();
|
||||
}
|
||||
|
||||
getAssetModule(path: string): Module {
|
||||
return this.getModule(path);
|
||||
}
|
||||
|
||||
getModule(path: string): Module {
|
||||
let m = this.modules.get(path);
|
||||
if (!m) {
|
||||
m = new Module(path, this, this.getTransformedFile(path));
|
||||
this.modules.set(path, m);
|
||||
}
|
||||
return m;
|
||||
}
|
||||
|
||||
getPackage(path: string): Package {
|
||||
let p = this.packages.get(path);
|
||||
if (!p) {
|
||||
p = new Package(path, this.getPackageData(path));
|
||||
this.packages.set(path, p);
|
||||
}
|
||||
return p;
|
||||
}
|
||||
|
||||
getPackageData(path: string): PackageData {
|
||||
const pkg = this.getTransformedFile(path).package;
|
||||
if (!pkg) {
|
||||
throw new Error(`"${path}" does not exist`);
|
||||
}
|
||||
return pkg;
|
||||
}
|
||||
|
||||
getPackageOf(filePath: string): ?Package {
|
||||
const candidate = this._getClosestPackage(filePath);
|
||||
return candidate != null ? this.getPackage(candidate) : null;
|
||||
}
|
||||
};
|
|
@@ -1,135 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const nullthrows = require('fbjs/lib/nullthrows');
|
||||
const path = require('path');
|
||||
|
||||
import type {PackageData} from '../types.flow';
|
||||
|
||||
module.exports = class Package {
|
||||
data: PackageData;
|
||||
path: string;
|
||||
root: string;
|
||||
type: 'Package';
|
||||
|
||||
constructor(packagePath: string, data: PackageData) {
|
||||
this.data = data;
|
||||
this.path = packagePath;
|
||||
this.root = path.dirname(packagePath);
|
||||
this.type = 'Package';
|
||||
}
|
||||
|
||||
getMain() {
|
||||
// Copied from node-haste/Package.js
|
||||
const replacements = getReplacements(this.data);
|
||||
if (typeof replacements === 'string') {
|
||||
return path.join(this.root, replacements);
|
||||
}
|
||||
|
||||
let main = getMain(this.data);
|
||||
|
||||
if (replacements && typeof replacements === 'object') {
|
||||
main = replacements[main] ||
|
||||
replacements[main + '.js'] ||
|
||||
replacements[main + '.json'] ||
|
||||
replacements[main.replace(/(\.js|\.json)$/, '')] ||
|
||||
main;
|
||||
}
|
||||
|
||||
return path.join(this.root, main);
|
||||
}
|
||||
|
||||
getName(): Promise<string> {
|
||||
return Promise.resolve(nullthrows(this.data.name));
|
||||
}
|
||||
|
||||
isHaste(): boolean {
|
||||
return !!this.data.name;
|
||||
}
|
||||
|
||||
redirectRequire(name: string) {
|
||||
// Copied from node-haste/Package.js
|
||||
const replacements = getReplacements(this.data);
|
||||
|
||||
if (!replacements || typeof replacements !== 'object') {
|
||||
return name;
|
||||
}
|
||||
|
||||
if (!path.isAbsolute(name)) {
|
||||
const replacement = replacements[name];
|
||||
// support exclude with "someDependency": false
|
||||
return replacement === false
|
||||
? false
|
||||
: replacement || name;
|
||||
}
|
||||
|
||||
let relPath = './' + path.relative(this.root, name);
|
||||
if (path.sep !== '/') {
|
||||
relPath = relPath.replace(new RegExp('\\' + path.sep, 'g'), '/');
|
||||
}
|
||||
|
||||
let redirect = replacements[relPath];
|
||||
|
||||
// false is a valid value
|
||||
if (redirect == null) {
|
||||
redirect = replacements[relPath + '.js'];
|
||||
if (redirect == null) {
|
||||
redirect = replacements[relPath + '.json'];
|
||||
}
|
||||
}
|
||||
|
||||
// support exclude with "./someFile": false
|
||||
if (redirect === false) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (redirect) {
|
||||
return path.join(
|
||||
this.root,
|
||||
redirect
|
||||
);
|
||||
}
|
||||
|
||||
return name;
|
||||
}
|
||||
};
|
||||
|
||||
function getMain(pkg) {
|
||||
return pkg.main || 'index';
|
||||
}
|
||||
|
||||
// Copied from node-haste/Package.js
|
||||
function getReplacements(pkg) {
|
||||
let rn = pkg['react-native'];
|
||||
let browser = pkg.browser;
|
||||
if (rn == null) {
|
||||
return browser;
|
||||
}
|
||||
|
||||
if (browser == null) {
|
||||
return rn;
|
||||
}
|
||||
|
||||
const main = getMain(pkg);
|
||||
if (typeof rn !== 'object') {
|
||||
rn = {[main]: rn};
|
||||
}
|
||||
|
||||
if (typeof browser !== 'object') {
|
||||
browser = {[main]: browser};
|
||||
}
|
||||
|
||||
// merge with "browser" as default,
|
||||
// "react-native" as override
|
||||
return {...browser, ...rn};
|
||||
}
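For illustration only (the package data below is hypothetical, not part of this diff), this is how the merged replacements drive getMain and redirectRequire:

// Hypothetical package.json data: "browser" entries are the default and
// "react-native" entries override them; a value of `false` excludes a module.
const data = {
  name: 'example-pkg',
  main: './index.js',
  browser: {'./index.js': './index.browser.js', fs: false},
  'react-native': {'./index.js': './index.native.js'},
};
// getReplacements(data) merges to:
//   {'./index.js': './index.native.js', fs: false}
// so a Package built from this data resolves its main entry to
// <root>/index.native.js, redirectRequire('fs') returns false (excluded),
// and any unknown id is passed through unchanged.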
|
|
@@ -1,70 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
|
||||
|
||||
import DependencyGraphHelpers from '../../node-haste/DependencyGraph/DependencyGraphHelpers';
|
||||
|
||||
type ModuleID = string;
|
||||
export type Path = string;
|
||||
type Platform = string;
|
||||
type Platforms = Set<Platform>;
|
||||
|
||||
export type Extensions = Array<string>;
|
||||
|
||||
export type Module = {
|
||||
path: Path,
|
||||
type: 'Module',
|
||||
getName(): Promise<ModuleID>,
|
||||
getPackage(): ?Package,
|
||||
isHaste(): Promise<boolean>,
|
||||
};
|
||||
|
||||
export type Package = {
|
||||
path: Path,
|
||||
root: Path,
|
||||
type: 'Package',
|
||||
getMain(): Path,
|
||||
getName(): Promise<ModuleID>,
|
||||
isHaste(): Promise<boolean>,
|
||||
redirectRequire(id: ModuleID): Path | false,
|
||||
};
|
||||
|
||||
export type ModuleCache = {
|
||||
getAssetModule(path: Path): Module,
|
||||
getModule(path: Path): Module,
|
||||
getPackage(path: Path): Package,
|
||||
getPackageOf(path: Path): ?Package,
|
||||
}
|
||||
|
||||
export type FastFS = {
|
||||
dirExists(path: Path): boolean,
|
||||
closest(path: string, fileName: string): ?string,
|
||||
fileExists(path: Path): boolean,
|
||||
getAllFiles(): Array<Path>,
|
||||
matches(directory: Path, pattern: RegExp): Array<Path>,
|
||||
};
|
||||
|
||||
type HasteMapOptions = {|
|
||||
extensions: Extensions,
|
||||
files: Array<string>,
|
||||
helpers: DependencyGraphHelpers,
|
||||
moduleCache: ModuleCache,
|
||||
platforms: Platforms,
|
||||
preferNativePlatform: true,
|
||||
|};
|
||||
|
||||
declare class HasteMap {
|
||||
// node-haste/DependencyGraph/HasteMap.js
|
||||
build(): Promise<Object>,
|
||||
constructor(options: HasteMapOptions): void,
|
||||
}
|
|
@@ -1,149 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import type { // eslint-disable-line sort-requires
|
||||
Extensions,
|
||||
Path,
|
||||
} from './node-haste.flow';
|
||||
|
||||
import type {
|
||||
ResolveFn,
|
||||
TransformedCodeFile,
|
||||
} from '../types.flow';
|
||||
|
||||
const AssetResolutionCache = require('../../node-haste/AssetResolutionCache');
|
||||
const DependencyGraphHelpers = require('../../node-haste/DependencyGraph/DependencyGraphHelpers');
|
||||
const FilesByDirNameIndex = require('../../node-haste/FilesByDirNameIndex');
|
||||
const HasteFS = require('./HasteFS');
|
||||
const HasteMap = require('../../node-haste/DependencyGraph/HasteMap');
|
||||
const Module = require('./Module');
|
||||
const ModuleCache = require('./ModuleCache');
|
||||
const ResolutionRequest = require('../../node-haste/DependencyGraph/ResolutionRequest');
|
||||
|
||||
const defaults = require('../../defaults');
|
||||
|
||||
import type {Moduleish, Packageish} from '../../node-haste/DependencyGraph/ResolutionRequest';
|
||||
|
||||
type ResolveOptions = {|
|
||||
assetExts: Extensions,
|
||||
extraNodeModules: {[id: string]: string},
|
||||
+sourceExts: Extensions,
|
||||
transformedFiles: {[path: Path]: TransformedCodeFile},
|
||||
|};
|
||||
|
||||
const platforms = new Set(defaults.platforms);
|
||||
|
||||
/**
|
||||
* We don't need to crawl the filesystem all over again so we just mock
|
||||
* a jest-haste-map's ModuleMap instance. Eventually, though, we'll
|
||||
* want to figure out how to reunify and get rid of `HasteMap`.
|
||||
*/
|
||||
function getFakeModuleMap(hasteMap: HasteMap<Module, Packageish>) {
|
||||
return {
|
||||
getModule(name: string, platform: ?string): ?string {
|
||||
const module = hasteMap.getModule(name, platform);
|
||||
return module && module.type === 'Module' ? module.path : null;
|
||||
},
|
||||
getPackage(name: string, platform: ?string): ?string {
|
||||
const pkg = hasteMap.getPackage(name);
|
||||
return pkg && pkg.path;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const nullModule: Moduleish = {
|
||||
path: '/',
|
||||
getPackage() {},
|
||||
hash() {
|
||||
throw new Error('not implemented');
|
||||
},
|
||||
readCached() { throw new Error('not implemented'); },
|
||||
readFresh() { return Promise.reject(new Error('not implemented')); },
|
||||
isHaste() { throw new Error('not implemented'); },
|
||||
getName() { throw new Error('not implemented'); },
|
||||
};
|
||||
|
||||
exports.createResolveFn = function(options: ResolveOptions): ResolveFn {
|
||||
const {
|
||||
assetExts,
|
||||
extraNodeModules,
|
||||
transformedFiles,
|
||||
sourceExts,
|
||||
} = options;
|
||||
const files = Object.keys(transformedFiles);
|
||||
function getTransformedFile(path) {
|
||||
const result = transformedFiles[path];
|
||||
if (!result) {
|
||||
throw new Error(`"${path}" does not exist`);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
const helpers = new DependencyGraphHelpers({
|
||||
assetExts,
|
||||
providesModuleNodeModules: defaults.providesModuleNodeModules,
|
||||
});
|
||||
|
||||
const hasteFS = new HasteFS(files);
|
||||
const moduleCache = new ModuleCache(
|
||||
filePath => hasteFS.closest(filePath, 'package.json'),
|
||||
getTransformedFile,
|
||||
);
|
||||
const hasteMap = new HasteMap({
|
||||
extensions: sourceExts,
|
||||
files,
|
||||
helpers,
|
||||
moduleCache,
|
||||
platforms,
|
||||
preferNativePlatform: true,
|
||||
});
|
||||
|
||||
const hasteMapBuilt = hasteMap.build();
|
||||
const resolutionRequests = {};
|
||||
const filesByDirNameIndex = new FilesByDirNameIndex(hasteMap.getAllFiles());
|
||||
const assetResolutionCache = new AssetResolutionCache({
|
||||
assetExtensions: new Set(assetExts),
|
||||
getDirFiles: dirPath => filesByDirNameIndex.getAllFiles(dirPath),
|
||||
platforms,
|
||||
});
|
||||
return (id, source, platform, _, callback) => {
|
||||
let resolutionRequest = resolutionRequests[platform];
|
||||
if (!resolutionRequest) {
|
||||
resolutionRequest = resolutionRequests[platform] = new ResolutionRequest({
|
||||
dirExists: filePath => hasteFS.dirExists(filePath),
|
||||
entryPath: '',
|
||||
extraNodeModules,
|
||||
hasteFS,
|
||||
helpers,
|
||||
moduleCache,
|
||||
moduleMap: getFakeModuleMap(hasteMap),
|
||||
platform,
|
||||
preferNativePlatform: true,
|
||||
resolveAsset: (dirPath, assetName) =>
|
||||
assetResolutionCache.resolve(dirPath, assetName, platform),
|
||||
sourceExts,
|
||||
});
|
||||
}
|
||||
|
||||
const from = source != null
|
||||
? new Module(source, moduleCache, getTransformedFile(source))
|
||||
: nullModule;
|
||||
hasteMapBuilt
|
||||
.then(() => resolutionRequest.resolveDependency(from, id))
|
||||
.then(
|
||||
// nextTick to escape promise error handling
|
||||
module => process.nextTick(callback, null, module.path),
|
||||
error => process.nextTick(callback, error),
|
||||
);
|
||||
};
|
||||
};
|
|
@@ -1 +0,0 @@
|
|||
{"main":"node-haste.js"}
|
|
@@ -1,297 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
declare var jest: any;
|
||||
jest.disableAutomock();
|
||||
|
||||
const indexedRamBundle = require('../indexed-ram-bundle');
|
||||
const {addModuleIdsToModuleWrapper} = require('../util');
|
||||
|
||||
declare var describe: any;
|
||||
declare var expect: any;
|
||||
declare var it: (string, () => ?Promise<any>) => void;
|
||||
declare var beforeAll: (() => ?Promise<any>) => void;
|
||||
|
||||
let code: Buffer;
|
||||
let map;
|
||||
let ids, modules, requireCall;
|
||||
const idForPath = ({path}) => getId(path);
|
||||
beforeAll(() => {
|
||||
modules = [
|
||||
makeModule('a', [], 'script'),
|
||||
makeModule('b', ['c']),
|
||||
makeModule('c', ['f']),
|
||||
makeModule('d', ['e']),
|
||||
makeModule('e'),
|
||||
makeModule('f'),
|
||||
];
|
||||
requireCall = makeModule('r', [], 'script', 'require(1);');
|
||||
|
||||
ids = new Map(modules.map(({file}, i) => [file.path, i]));
|
||||
({code, map} = createRamBundle());
|
||||
});
|
||||
|
||||
it('starts the bundle file with the magic number', () => {
|
||||
expect(code.readUInt32LE(0)).toBe(0xFB0BD1E5);
|
||||
});
|
||||
|
||||
it('contains the number of modules in the module table', () => {
|
||||
expect(code.readUInt32LE(SIZEOF_INT32)).toBe(modules.length);
|
||||
});
|
||||
|
||||
it('has the correct length of the startup section', () => {
|
||||
expect(code.readUInt32LE(SIZEOF_INT32 * 2))
|
||||
.toBe(requireCall.file.code.length + 1);
|
||||
});
|
||||
|
||||
it('contains the code after the offset table', () => {
|
||||
const {codeOffset, startupSectionLength, table} = parseOffsetTable(code);
|
||||
|
||||
const startupSection =
|
||||
code.slice(codeOffset, codeOffset + startupSectionLength - 1);
|
||||
expect(startupSection.toString()).toBe(requireCall.file.code);
|
||||
|
||||
table.forEach(([offset, length], i) => {
|
||||
const moduleCode =
|
||||
code.slice(codeOffset + offset, codeOffset + offset + length - 1);
|
||||
expect(moduleCode.toString()).toBe(expectedCode(modules[i]));
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a source map', () => {
|
||||
let line = countLines(requireCall);
|
||||
expect(map.sections.slice(1)).toEqual(modules.map(m => {
|
||||
const section = {
|
||||
map: m.file.map || lineByLineMap(m.file.path),
|
||||
offset: {column: 0, line},
|
||||
};
|
||||
line += countLines(m);
|
||||
return section;
|
||||
}));
|
||||
expect(map.x_facebook_offsets).toEqual([1, 2, 3, 4, 5, 6]);
|
||||
});
|
||||
|
||||
describe('Startup section optimization', () => {
|
||||
let last, preloaded;
|
||||
beforeAll(() => {
|
||||
last = modules[modules.length - 1];
|
||||
preloaded = [modules[2], modules[3], last];
|
||||
({code, map} = createRamBundle(new Set(preloaded.map(getPath))));
|
||||
});
|
||||
|
||||
it('supports additional modules in the startup section', () => {
|
||||
const {codeOffset, startupSectionLength, table} = parseOffsetTable(code);
|
||||
|
||||
const startupSection =
|
||||
code.slice(codeOffset, codeOffset + startupSectionLength - 1);
|
||||
expect(startupSection.toString())
|
||||
.toBe(preloaded.concat([requireCall]).map(expectedCode).join('\n'));
|
||||
|
||||
|
||||
preloaded.forEach(m => {
|
||||
const idx = idForPath(m.file);
|
||||
expect(table[idx]).toEqual(m === last ? undefined : [0, 0]);
|
||||
});
|
||||
|
||||
table.forEach(([offset, length], i) => {
|
||||
if (offset !== 0 && length !== 0) {
|
||||
const moduleCode =
|
||||
code.slice(codeOffset + offset, codeOffset + offset + length - 1);
|
||||
expect(moduleCode.toString()).toBe(expectedCode(modules[i]));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('reflects additional sources in the startup section in the source map', () => {
|
||||
let line = preloaded.reduce(
|
||||
(l, m) => l + countLines(m),
|
||||
countLines(requireCall),
|
||||
);
|
||||
|
||||
expect(map.x_facebook_offsets).toEqual([4, 5,,, 6]); // eslint-disable-line no-sparse-arrays
|
||||
|
||||
expect(map.sections.slice(1)).toEqual(
|
||||
modules
|
||||
.filter(not(Set.prototype.has), new Set(preloaded))
|
||||
.map(m => {
|
||||
const section = {
|
||||
map: m.file.map || lineByLineMap(m.file.path),
|
||||
offset: {column: 0, line},
|
||||
};
|
||||
line += countLines(m);
|
||||
return section;
|
||||
}
|
||||
));
|
||||
});
|
||||
});
|
||||
|
||||
describe('RAM groups / common sections', () => {
|
||||
let groups, groupHeads;
|
||||
beforeAll(() => {
|
||||
groups = [
|
||||
[modules[1], modules[2], modules[5]],
|
||||
[modules[3], modules[4]],
|
||||
];
|
||||
groupHeads = groups.map(g => g[0]);
|
||||
({code, map} = createRamBundle(undefined, groupHeads.map(getPath)));
|
||||
});
|
||||
|
||||
it('supports grouping the transitive dependencies of files into common sections', () => {
|
||||
const {codeOffset, table} = parseOffsetTable(code);
|
||||
|
||||
groups.forEach(group => {
|
||||
const [head, ...deps] = group.map(x => idForPath(x.file));
|
||||
const groupEntry = table[head];
|
||||
deps.forEach(id => expect(table[id]).toEqual(groupEntry));
|
||||
|
||||
const [offset, length] = groupEntry;
|
||||
const groupCode = code.slice(codeOffset + offset, codeOffset + offset + length - 1);
|
||||
expect(groupCode.toString())
|
||||
.toEqual(group.map(expectedCode).join('\n'));
|
||||
});
|
||||
});
|
||||
|
||||
it('reflects section groups in the source map', () => {
|
||||
expect(map.x_facebook_offsets).toEqual([1, 2, 2, 5, 5, 2]);
|
||||
const maps = map.sections.slice(-2);
|
||||
const toplevelOffsets = [2, 5];
|
||||
|
||||
maps.map((groupMap, i) => [groups[i], groupMap]).forEach(([group, groupMap], i) => {
|
||||
const offsets = group.reduce(moduleLineOffsets, [])[0];
|
||||
expect(groupMap).toEqual({
|
||||
map: {
|
||||
version: 3,
|
||||
sections: group.map((module, j) => ({
|
||||
map: module.file.map,
|
||||
offset: {line: offsets[j], column: 0},
|
||||
})),
|
||||
},
|
||||
offset: {line: toplevelOffsets[i], column: 0},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
function moduleLineOffsets([offsets = [], line = 0], module) {
|
||||
return [[...offsets, line], line + countLines(module)];
|
||||
}
|
||||
});
|
||||
|
||||
function createRamBundle(preloadedModules = new Set(), ramGroups) {
|
||||
const build = indexedRamBundle.createBuilder(preloadedModules, ramGroups);
|
||||
const result = build({
|
||||
filename: 'arbitrary/filename.js',
|
||||
idForPath,
|
||||
modules,
|
||||
requireCalls: [requireCall],
|
||||
});
|
||||
|
||||
if (typeof result.code === 'string') {
|
||||
throw new Error('Expected a buffer, not a string');
|
||||
}
|
||||
return {code: result.code, map: result.map};
|
||||
}
|
||||
|
||||
function makeModule(name, deps = [], type = 'module', moduleCode = `var ${name};`) {
|
||||
const path = makeModulePath(name);
|
||||
return {
|
||||
dependencies: deps.map(makeDependency),
|
||||
file: {
|
||||
code: type === 'module' ? makeModuleCode(moduleCode) : moduleCode,
|
||||
map: type !== 'module'
|
||||
? null
|
||||
: makeModuleMap(name, path),
|
||||
path,
|
||||
type,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function makeModuleMap(name, path) {
|
||||
return {
|
||||
version: 3,
|
||||
mappings: Array(parseInt(name, 36) + 1).join(','),
|
||||
names: [name],
|
||||
sources: [path],
|
||||
};
|
||||
}
|
||||
|
||||
function makeModuleCode(moduleCode) {
|
||||
return `__d(() => {${moduleCode}})`;
|
||||
}
|
||||
|
||||
function makeModulePath(name) {
|
||||
return `/${name}.js`;
|
||||
}
|
||||
|
||||
function makeDependency(name) {
|
||||
const path = makeModulePath(name);
|
||||
return {
|
||||
id: name,
|
||||
path,
|
||||
};
|
||||
}
|
||||
|
||||
function expectedCode(module) {
|
||||
const {file} = module;
|
||||
return file.type === 'module'
|
||||
? addModuleIdsToModuleWrapper(module, idForPath)
|
||||
: file.code;
|
||||
}
|
||||
|
||||
function getId(path) {
|
||||
if (path === requireCall.file.path) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
const id = ids.get(path);
|
||||
if (id == null) {
|
||||
throw new Error(`Unknown file: ${path}`);
|
||||
}
|
||||
return id;
|
||||
}
|
||||
|
||||
function getPath(module) {
|
||||
return module.file.path;
|
||||
}
|
||||
|
||||
const SIZEOF_INT32 = 4;
|
||||
function parseOffsetTable(buffer) {
|
||||
const n = buffer.readUInt32LE(SIZEOF_INT32);
|
||||
const startupSectionLength = buffer.readUInt32LE(SIZEOF_INT32 * 2);
|
||||
const baseOffset = SIZEOF_INT32 * 3;
|
||||
const table = Array(n);
|
||||
for (let i = 0; i < n; ++i) {
|
||||
const offset = baseOffset + i * 2 * SIZEOF_INT32;
|
||||
table[i] = [buffer.readUInt32LE(offset), buffer.readUInt32LE(offset + SIZEOF_INT32)];
|
||||
}
|
||||
return {
|
||||
codeOffset: baseOffset + n * 2 * SIZEOF_INT32,
|
||||
startupSectionLength,
|
||||
table,
|
||||
};
|
||||
}
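As a reading aid (an assumption reconstructed from the assertions above, not text from the diff), the binary layout these tests exercise is:

// Indexed RAM bundle header, little-endian uint32 words:
//   word 0              magic number 0xFB0BD1E5
//   word 1              number of entries in the module table
//   word 2              byte length of the startup section
//   words 3 .. 3+2n-1   per-module [offset, length] pairs, relative to codeOffset
// codeOffset = (3 + 2n) * 4; the startup code and module code follow it, each
// chunk terminated by a trailing NUL byte (hence the `- 1` in the slices above).
function isRamBundle(buffer) {
  // A buffer is treated as an indexed RAM bundle when it starts with the magic number.
  return buffer.length >= 4 && buffer.readUInt32LE(0) === 0xFB0BD1E5;
}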
|
||||
|
||||
function countLines(module) {
|
||||
return module.file.code.split('\n').length;
|
||||
}
|
||||
|
||||
function lineByLineMap(file) {
|
||||
return {
|
||||
file,
|
||||
mappings: 'AAAA;',
|
||||
names: [],
|
||||
sources: [file],
|
||||
version: 3,
|
||||
};
|
||||
}
|
||||
|
||||
const not = fn => function() { return !fn.apply(this, arguments); };
|
|
@@ -1,83 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest.disableAutomock();
|
||||
|
||||
const {match} = require('sinon');
|
||||
const {fn} = require('../../test-helpers');
|
||||
const {
|
||||
addModuleIdsToModuleWrapper,
|
||||
createIdForPathFn,
|
||||
} = require('../util');
|
||||
|
||||
const {any} = jasmine;
|
||||
|
||||
describe('`addModuleIdsToModuleWrapper`:', () => {
|
||||
const path = 'path/to/file';
|
||||
const createModule = (dependencies = []) => ({
|
||||
dependencies,
|
||||
file: {code: '__d(function(){});', isModule: true, path},
|
||||
});
|
||||
|
||||
it('completes the module wrapped with module ID, and an array of dependency IDs', () => {
|
||||
const dependencies = [
|
||||
{id: 'a', path: 'path/to/a.js'},
|
||||
{id: 'b', path: 'location/of/b.js'},
|
||||
];
|
||||
const module = createModule(dependencies);
|
||||
|
||||
const idForPath = fn();
|
||||
idForPath.stub
|
||||
.withArgs(match({path})).returns(12)
|
||||
.withArgs(match({path: dependencies[0].path})).returns(345)
|
||||
.withArgs(match({path: dependencies[1].path})).returns(6);
|
||||
|
||||
expect(addModuleIdsToModuleWrapper(module, idForPath))
|
||||
.toEqual('__d(function(){},12,[345,6]);');
|
||||
});
|
||||
|
||||
it('omits the array of dependency IDs if it is empty', () => {
|
||||
const module = createModule();
|
||||
expect(addModuleIdsToModuleWrapper(module, () => 98))
|
||||
.toEqual(`__d(function(){},${98});`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('`createIdForPathFn`', () => {
|
||||
let idForPath;
|
||||
beforeEach(() => {
|
||||
idForPath = createIdForPathFn();
|
||||
});
|
||||
|
||||
it('returns a number for a string', () => {
|
||||
expect(idForPath({path: 'arbitrary'})).toEqual(any(Number));
|
||||
});
|
||||
|
||||
it('returns consecutive numbers', () => {
|
||||
const strings = [
|
||||
'arbitrary string',
|
||||
'looking/like/a/path',
|
||||
'/absolute/path/to/file.js',
|
||||
'/more files/are here',
|
||||
];
|
||||
|
||||
strings.forEach((string, i) => {
|
||||
expect(idForPath({path: string})).toEqual(i);
|
||||
});
|
||||
});
|
||||
|
||||
it('returns the same id if the same string is passed in again', () => {
|
||||
const path = 'this/is/an/arbitrary/path.js';
|
||||
const id = idForPath({path});
|
||||
idForPath({path: '/other/file'});
|
||||
idForPath({path: 'and/another/file'});
|
||||
expect(idForPath({path})).toEqual(id);
|
||||
});
|
||||
});
|
|
@@ -1,106 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const buildSourceMapWithMetaData = require('../../shared/output/unbundle/build-unbundle-sourcemap-with-metadata.js');
|
||||
const nullthrows = require('fbjs/lib/nullthrows');
|
||||
|
||||
const {createRamBundleGroups} = require('../../Bundler/util');
|
||||
const {buildTableAndContents, createModuleGroups} = require('../../shared/output/unbundle/as-indexed-file');
|
||||
const {addModuleIdsToModuleWrapper, concat} = require('./util');
|
||||
|
||||
import type {FBIndexMap} from '../../lib/SourceMap.js';
|
||||
import type {OutputFn} from '../types.flow';
|
||||
|
||||
function asIndexedRamBundle({
|
||||
filename,
|
||||
idForPath,
|
||||
modules,
|
||||
preloadedModules,
|
||||
ramGroupHeads,
|
||||
requireCalls,
|
||||
}) {
|
||||
const [startup, deferred] = partition(modules, preloadedModules);
|
||||
const startupModules = Array.from(concat(startup, requireCalls));
|
||||
const deferredModules = deferred.map(m => toModuleTransport(m, idForPath));
|
||||
const ramGroups = createRamBundleGroups(ramGroupHeads || [], deferredModules, subtree);
|
||||
const moduleGroups = createModuleGroups(ramGroups, deferredModules);
|
||||
|
||||
const tableAndContents = buildTableAndContents(
|
||||
startupModules.map(m => getModuleCode(m, idForPath)).join('\n'),
|
||||
deferredModules,
|
||||
moduleGroups,
|
||||
'utf8',
|
||||
);
|
||||
|
||||
return {
|
||||
code: Buffer.concat(tableAndContents),
|
||||
map: buildSourceMapWithMetaData({
|
||||
fixWrapperOffset: false,
|
||||
lazyModules: deferredModules,
|
||||
moduleGroups,
|
||||
startupModules: startupModules.map(m => toModuleTransport(m, idForPath)),
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
function toModuleTransport(module, idForPath) {
|
||||
const {dependencies, file} = module;
|
||||
return {
|
||||
code: getModuleCode(module, idForPath),
|
||||
dependencies,
|
||||
id: idForPath(file),
|
||||
map: file.map,
|
||||
name: file.path,
|
||||
sourcePath: file.path,
|
||||
};
|
||||
}
|
||||
|
||||
function getModuleCode(module, idForPath) {
|
||||
const {file} = module;
|
||||
return file.type === 'module'
|
||||
? addModuleIdsToModuleWrapper(module, idForPath)
|
||||
: file.code;
|
||||
}
|
||||
|
||||
function partition(modules, preloadedModules) {
|
||||
const startup = [];
|
||||
const deferred = [];
|
||||
for (const module of modules) {
|
||||
(preloadedModules.has(module.file.path) ? startup : deferred).push(module);
|
||||
}
|
||||
|
||||
return [startup, deferred];
|
||||
}
|
||||
|
||||
function *subtree(
|
||||
moduleTransport,
|
||||
moduleTransportsByPath,
|
||||
seen = new Set(),
|
||||
) {
|
||||
seen.add(moduleTransport.id);
|
||||
for (const {path} of moduleTransport.dependencies) {
|
||||
const dependency = nullthrows(moduleTransportsByPath.get(path));
|
||||
if (!seen.has(dependency.id)) {
|
||||
yield dependency.id;
|
||||
yield *subtree(dependency, moduleTransportsByPath, seen);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function createBuilder(
|
||||
preloadedModules: Set<string>,
|
||||
ramGroupHeads: ?$ReadOnlyArray<string>,
|
||||
): OutputFn<FBIndexMap> {
|
||||
return x => asIndexedRamBundle({...x, preloadedModules, ramGroupHeads});
|
||||
}
|
||||
|
||||
exports.createBuilder = createBuilder;
|
|
@@ -1,64 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const meta = require('../../shared/output/meta');
|
||||
|
||||
const {createIndexMap} = require('./source-map');
|
||||
const {addModuleIdsToModuleWrapper, concat} = require('./util');
|
||||
|
||||
import type {OutputFn} from '../types.flow';
|
||||
|
||||
function asPlainBundle({
|
||||
filename,
|
||||
idForPath,
|
||||
modules,
|
||||
requireCalls,
|
||||
sourceMapPath,
|
||||
}) {
|
||||
let code = '';
|
||||
let line = 0;
|
||||
const sections = [];
|
||||
|
||||
for (const module of concat(modules, requireCalls)) {
|
||||
const {file} = module;
|
||||
const moduleCode = file.type === 'module'
|
||||
? addModuleIdsToModuleWrapper(module, idForPath)
|
||||
: file.code;
|
||||
|
||||
code += moduleCode + '\n';
|
||||
if (file.map) {
|
||||
sections.push({
|
||||
map: file.map,
|
||||
offset: {column: 0, line},
|
||||
});
|
||||
}
|
||||
line += countLines(moduleCode);
|
||||
}
|
||||
|
||||
if (sourceMapPath) {
|
||||
code += `//# sourceMappingURL=${sourceMapPath}`;
|
||||
}
|
||||
|
||||
return {
|
||||
code,
|
||||
extraFiles: [[`${filename}.meta`, meta(code)]],
|
||||
map: createIndexMap({file: filename, sections}),
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = (asPlainBundle: OutputFn<>);
|
||||
|
||||
const reLine = /^/gm;
|
||||
function countLines(string: string): number {
|
||||
//$FlowFixMe This regular expression always matches
|
||||
return string.match(reLine).length;
|
||||
}
|
|
@@ -1,26 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
import type {FBSourceMap, IndexMapSection, IndexMap} from '../../lib/SourceMap';
|
||||
|
||||
export type {FBSourceMap};
|
||||
|
||||
type CreateIndexMapOptions = {|
|
||||
file?: string,
|
||||
sections?: Array<IndexMapSection>
|
||||
|};
|
||||
|
||||
exports.createIndexMap = (opts?: CreateIndexMapOptions): IndexMap => ({
|
||||
version: 3,
|
||||
file: opts && opts.file,
|
||||
sections: opts && opts.sections || [],
|
||||
});
|
|
@@ -1,81 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const virtualModule = require('../module').virtual;
|
||||
|
||||
import type {IdForPathFn, Module} from '../types.flow';
|
||||
|
||||
// Transformed modules have the form
|
||||
// __d(function(global, require, module, exports, dependencyMap) {
|
||||
// /* code */
|
||||
// });
|
||||
//
|
||||
// This function adds the numeric module ID, and an array with the IDs of
|
||||
// the module's dependencies, before the closing parenthesis.
|
||||
exports.addModuleIdsToModuleWrapper = (
|
||||
module: Module,
|
||||
idForPath: {path: string} => number,
|
||||
): string => {
|
||||
const {dependencies, file} = module;
|
||||
const {code} = file;
|
||||
const index = code.lastIndexOf(')');
|
||||
|
||||
// calling `idForPath` on the module itself first gives us a lower module id
|
||||
// for the file itself than for its dependencies. That reflects their order
|
||||
// in the bundle.
|
||||
const fileId = idForPath(file);
|
||||
|
||||
// This code runs for both development and production builds, after
|
||||
// minification. That's why we leave out all spaces.
|
||||
const dependencyIds =
|
||||
dependencies.length ? `,[${dependencies.map(idForPath).join(',')}]` : '';
|
||||
return (
|
||||
code.slice(0, index) +
|
||||
`,${fileId}` +
|
||||
dependencyIds +
|
||||
code.slice(index)
|
||||
);
|
||||
};
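A small usage sketch (the module data is made up; the expected strings match the behaviour exercised by the output tests earlier in this diff):

const exampleModule = {
  dependencies: [{id: 'a', path: '/a.js'}, {id: 'b', path: '/b.js'}],
  file: {code: '__d(function(){});', map: null, path: '/file.js', type: 'module'},
};
const exampleIdForPath = ({path}) =>
  ({'/file.js': 12, '/a.js': 345, '/b.js': 6}[path]);
// exports.addModuleIdsToModuleWrapper(exampleModule, exampleIdForPath)
//   === '__d(function(){},12,[345,6]);'
// With an empty dependency array the trailing `,[...]` part is omitted:
//   '__d(function(){},12);'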
|
||||
|
||||
exports.concat = function* concat<T>(
|
||||
...iterables: Array<Iterable<T>>
|
||||
): Iterable<T> {
|
||||
for (const it of iterables) {
|
||||
yield* it;
|
||||
}
|
||||
};
|
||||
|
||||
// Creates an idempotent function that returns numeric IDs for objects based
|
||||
// on their `path` property.
|
||||
exports.createIdForPathFn = (): ({path: string} => number) => {
|
||||
const seen = new Map();
|
||||
let next = 0;
|
||||
return ({path}) => {
|
||||
let id = seen.get(path);
|
||||
if (id == null) {
|
||||
id = next++;
|
||||
seen.set(path, id);
|
||||
}
|
||||
return id;
|
||||
};
|
||||
};
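Usage note (mirrors the behaviour asserted in the util tests earlier in this diff):

const idForPath = exports.createIdForPathFn();
idForPath({path: '/first.js'});  // 0
idForPath({path: '/second.js'}); // 1
idForPath({path: '/first.js'});  // 0 again - same path, same id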
|
||||
|
||||
// creates a series of virtual modules with require calls to the passed-in
|
||||
// modules.
|
||||
exports.requireCallsTo = function* (
|
||||
modules: Iterable<Module>,
|
||||
idForPath: IdForPathFn,
|
||||
): Iterable<Module> {
|
||||
for (const module of modules) {
|
||||
yield virtualModule(`require(${idForPath(module.file)});`);
|
||||
}
|
||||
};
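For example (a sketch reusing the idForPath from the note above), the startup require calls for two modules are virtual modules whose code is 'require(0);' and 'require(1);':

const startupCalls = Array.from(exports.requireCallsTo(
  [
    {dependencies: [], file: {code: '', map: null, path: '/first.js', type: 'module'}},
    {dependencies: [], file: {code: '', map: null, path: '/second.js', type: 'module'}},
  ],
  idForPath,
));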
|
|
@@ -1 +0,0 @@
|
|||
{"main": "ModuleGraph.js"}
|
|
@@ -1,15 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const {Console} = require('console');
|
||||
const {Writable} = require('stream');
|
||||
|
||||
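// No-op sink: every write immediately invokes its callback without touching
// the chunk, so the Console below silently discards all output.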
const write = (_, __, callback) => callback();
|
||||
module.exports = new Console(new Writable({write, writev: write}));
|
|
@@ -1,23 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const generate = require('babel-generator').default;
|
||||
const stub = require('sinon/lib/sinon/stub');
|
||||
|
||||
exports.fn = () => {
|
||||
const s = stub();
|
||||
const f = jest.fn(s);
|
||||
f.stub = s;
|
||||
return f;
|
||||
};
|
||||
|
||||
const generateOptions = {concise: true};
|
||||
exports.codeFromAst = ast => generate(ast, generateOptions).code;
|
||||
exports.comparableCode = code => code.trim().replace(/\s\s+/g, ' ');
|
|
@@ -1,172 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
import type {FBSourceMap, MappingsMap, SourceMap} from '../lib/SourceMap';
|
||||
import type {Ast} from 'babel-core';
|
||||
import type {Console} from 'console';
|
||||
export type {Transformer} from '../JSTransformer/worker';
|
||||
|
||||
export type BuildResult = {|
|
||||
...GraphResult,
|
||||
prependedScripts: $ReadOnlyArray<Module>,
|
||||
|};
|
||||
|
||||
export type Callback<A = void, B = void>
|
||||
= (Error => void)
|
||||
& ((null | void, A, B) => void);
|
||||
|
||||
type Dependency = {|
|
||||
id: string,
|
||||
path: string,
|
||||
|};
|
||||
|
||||
export type File = {|
|
||||
code: string,
|
||||
map: ?MappingsMap,
|
||||
path: string,
|
||||
type: CodeFileTypes,
|
||||
|};
|
||||
|
||||
type CodeFileTypes = 'module' | 'script';
|
||||
|
||||
export type GraphFn = (
|
||||
entryPoints: Iterable<string>,
|
||||
platform: string,
|
||||
options?: ?GraphOptions,
|
||||
callback?: Callback<GraphResult>,
|
||||
) => void;
|
||||
|
||||
type GraphOptions = {|
|
||||
log?: Console,
|
||||
optimize?: boolean,
|
||||
skip?: Set<string>,
|
||||
|};
|
||||
|
||||
export type GraphResult = {|
|
||||
entryModules: Iterable<Module>,
|
||||
modules: Iterable<Module>,
|
||||
|};
|
||||
|
||||
export type IdForPathFn = {path: string} => number;
|
||||
|
||||
export type LoadFn = (
|
||||
file: string,
|
||||
options: LoadOptions,
|
||||
callback: Callback<File, Array<string>>,
|
||||
) => void;
|
||||
|
||||
type LoadOptions = {|
|
||||
log?: Console,
|
||||
optimize?: boolean,
|
||||
platform?: string,
|
||||
|};
|
||||
|
||||
export type Module = {|
|
||||
dependencies: Array<Dependency>,
|
||||
file: File,
|
||||
|};
|
||||
|
||||
export type PostProcessModules = (
|
||||
modules: Iterable<Module>,
|
||||
entryPoints: Array<string>,
|
||||
) => Iterable<Module>;
|
||||
|
||||
export type OutputFn<M: FBSourceMap | SourceMap = FBSourceMap | SourceMap> = ({|
|
||||
filename: string,
|
||||
idForPath: IdForPathFn,
|
||||
modules: Iterable<Module>,
|
||||
requireCalls: Iterable<Module>,
|
||||
sourceMapPath?: string,
|
||||
|}) => OutputResult<M>;
|
||||
|
||||
type OutputResult<M: FBSourceMap | SourceMap> = {|
|
||||
code: string | Buffer,
|
||||
extraFiles?: Iterable<[string, string | Buffer]>,
|
||||
map: M,
|
||||
|};
|
||||
|
||||
export type PackageData = {|
|
||||
browser?: Object | string,
|
||||
main?: string,
|
||||
name?: string,
|
||||
'react-native'?: Object | string,
|
||||
|};
|
||||
|
||||
export type ResolveFn = (
|
||||
id: string,
|
||||
source: ?string,
|
||||
platform: string,
|
||||
options?: ResolveOptions,
|
||||
callback: Callback<string>,
|
||||
) => void;
|
||||
|
||||
type ResolveOptions = {
|
||||
log?: Console,
|
||||
};
|
||||
|
||||
export type TransformerResult = {|
|
||||
ast: ?Ast,
|
||||
code: string,
|
||||
map: ?MappingsMap,
|
||||
|};
|
||||
|
||||
export type TransformResult = {|
|
||||
code: string,
|
||||
dependencies: Array<string>,
|
||||
dependencyMapName?: string,
|
||||
map: ?MappingsMap,
|
||||
|};
|
||||
|
||||
export type TransformResults = {[string]: TransformResult};
|
||||
|
||||
export type TransformVariants = {+[name: string]: {}, +default: {}};
|
||||
|
||||
export type TransformedCodeFile = {
|
||||
+code: string,
|
||||
+file: string,
|
||||
+hasteID: ?string,
|
||||
package?: PackageData,
|
||||
+transformed: TransformResults,
|
||||
+type: CodeFileTypes,
|
||||
};
|
||||
|
||||
export type AssetFile = {|
|
||||
+assetContentBase64: string,
|
||||
+filePath: string,
|
||||
|};
|
||||
|
||||
export type TransformedSourceFile =
|
||||
| {|
|
||||
+type: 'code',
|
||||
+details: TransformedCodeFile,
|
||||
|}
|
||||
| {|
|
||||
+type: 'asset',
|
||||
+details: AssetFile,
|
||||
|}
|
||||
;
|
||||
|
||||
export type LibraryOptions = {|
|
||||
dependencies?: Array<string>,
|
||||
optimize: boolean,
|
||||
platform?: string,
|
||||
rebasePath: string => string,
|
||||
|};
|
||||
|
||||
export type Base64Content = string;
|
||||
export type AssetContentsByPath = {[destFilePath: string]: Base64Content};
|
||||
|
||||
export type Library = {|
|
||||
+files: Array<TransformedCodeFile>,
|
||||
/* cannot be a Map because it's JSONified later on */
|
||||
+assets: AssetContentsByPath,
|
||||
|};
|
|
@@ -1,29 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const asyncify: Asyncify = require('async/asyncify');
|
||||
const optimizeModule = require('./worker/optimize-module');
|
||||
const transformModule = require('./worker/transform-module');
|
||||
const wrapWorkerFn = require('./worker/wrap-worker-fn');
|
||||
|
||||
import type {Callback} from './types.flow';
|
||||
import type {OptimizationOptions} from './worker/optimize-module';
|
||||
import type {TransformOptions} from './worker/transform-module';
|
||||
import type {WorkerFnWithIO} from './worker/wrap-worker-fn';
|
||||
|
||||
type Asyncify = <A, B, C>((A, B) => C) => (A, B, Callback<C>) => void;
|
||||
|
||||
|
||||
exports.optimizeModule =
|
||||
(wrapWorkerFn(asyncify(optimizeModule)): WorkerFnWithIO<OptimizationOptions>);
|
||||
exports.transformModule =
|
||||
(wrapWorkerFn(transformModule): WorkerFnWithIO<TransformOptions>);
|
|
@@ -1,52 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const babel = require('babel-core');
|
||||
|
||||
const MODULE_FACTORY_PARAMETERS = ['global', 'require', 'module', 'exports'];
|
||||
const POLYFILL_FACTORY_PARAMETERS = ['global'];
|
||||
|
||||
function wrapModule(fileAst: Object, dependencyMapName: string): Object {
|
||||
const t = babel.types;
|
||||
const params = MODULE_FACTORY_PARAMETERS.concat(dependencyMapName);
|
||||
const factory = functionFromProgram(fileAst.program, params);
|
||||
const def = t.callExpression(t.identifier('__d'), [factory]);
|
||||
return t.file(t.program([t.expressionStatement(def)]));
|
||||
}
|
||||
|
||||
function wrapPolyfill(fileAst: Object): Object {
|
||||
const t = babel.types;
|
||||
const factory = functionFromProgram(fileAst.program, POLYFILL_FACTORY_PARAMETERS);
|
||||
const iife = t.callExpression(factory, [t.identifier('this')]);
|
||||
return t.file(t.program([t.expressionStatement(iife)]));
|
||||
}
|
||||
|
||||
function functionFromProgram(program: Object, parameters: Array<string>): Object {
|
||||
const t = babel.types;
|
||||
return t.functionExpression(
|
||||
t.identifier(''),
|
||||
parameters.map(makeIdentifier),
|
||||
t.blockStatement(program.body, program.directives),
|
||||
);
|
||||
}
|
||||
|
||||
function makeIdentifier(name: string): Object {
|
||||
return babel.types.identifier(name);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
MODULE_FACTORY_PARAMETERS,
|
||||
POLYFILL_FACTORY_PARAMETERS,
|
||||
wrapModule,
|
||||
wrapPolyfill,
|
||||
};
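For illustration (a sketch, not part of the change), wrapping the trivial program `foo();`:

const {parse} = require('babylon');
// wrapModule produces an AST that prints roughly as:
//   __d(function (global, require, module, exports, _depMap) { foo(); });
const wrappedModule = wrapModule(parse('foo();'), '_depMap');
// wrapPolyfill produces an immediately invoked function instead:
//   (function (global) { foo(); })(this);
const wrappedPolyfill = wrapPolyfill(parse('foo();'));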
|
|
@@ -1,135 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2017-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest.disableAutomock();
|
||||
|
||||
const collectDependencies = require('../collect-dependencies');
|
||||
const astFromCode = require('babylon').parse;
|
||||
const {codeFromAst, comparableCode} = require('../../test-helpers');
|
||||
|
||||
const {any} = expect;
|
||||
|
||||
describe('dependency collection from ASTs:', () => {
|
||||
it('collects dependency identifiers from the code', () => {
|
||||
const ast = astFromCode(`
|
||||
const a = require('b/lib/a');
|
||||
exports.do = () => require("do");
|
||||
if (!something) {
|
||||
require("setup/something");
|
||||
}
|
||||
`);
|
||||
|
||||
expect(collectDependencies(ast).dependencies)
|
||||
.toEqual(['b/lib/a', 'do', 'setup/something']);
|
||||
});
|
||||
|
||||
it('supports template literals as arguments', () => {
|
||||
const ast = astFromCode('require(`left-pad`)');
|
||||
|
||||
expect(collectDependencies(ast).dependencies)
|
||||
.toEqual(['left-pad']);
|
||||
});
|
||||
|
||||
it('ignores template literals with interpolations', () => {
|
||||
const ast = astFromCode('require(`left${"-"}pad`)');
|
||||
|
||||
expect(collectDependencies(ast).dependencies)
|
||||
.toEqual([]);
|
||||
});
|
||||
|
||||
it('ignores tagged template literals', () => {
|
||||
const ast = astFromCode('require(tag`left-pad`)');
|
||||
|
||||
expect(collectDependencies(ast).dependencies)
|
||||
.toEqual([]);
|
||||
});
|
||||
|
||||
it('exposes a string as `dependencyMapName`', () => {
|
||||
const ast = astFromCode('require("arbitrary")');
|
||||
expect(collectDependencies(ast).dependencyMapName)
|
||||
.toEqual(any(String));
|
||||
});
|
||||
|
||||
it('exposes a string as `dependencyMapName` even without collecting dependencies', () => {
|
||||
const ast = astFromCode('');
|
||||
expect(collectDependencies(ast).dependencyMapName)
|
||||
.toEqual(any(String));
|
||||
});
|
||||
|
||||
it('replaces all required module ID strings with array lookups, keeps the ID as second argument',
|
||||
() => {
|
||||
const ast = astFromCode(`
|
||||
const a = require('b/lib/a');
|
||||
const b = require(123);
|
||||
exports.do = () => require("do");
|
||||
if (!something) {
|
||||
require("setup/something");
|
||||
}
|
||||
`);
|
||||
|
||||
const {dependencyMapName} = collectDependencies(ast);
|
||||
|
||||
expect(codeFromAst(ast)).toEqual(comparableCode(`
|
||||
const a = require(${dependencyMapName}[0], 'b/lib/a');
|
||||
const b = require(123);
|
||||
exports.do = () => require(${dependencyMapName}[1], "do");
|
||||
if (!something) {
|
||||
require(${dependencyMapName}[2], "setup/something");
|
||||
}
|
||||
`));
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
describe('Dependency collection from optimized ASTs:', () => {
|
||||
const dependencyMapName = 'arbitrary';
|
||||
const {forOptimization} = collectDependencies;
|
||||
let ast, names;
|
||||
|
||||
beforeEach(() => {
|
||||
ast = astFromCode(`
|
||||
const a = require(${dependencyMapName}[0], 'b/lib/a');
|
||||
const b = require(123);
|
||||
exports.do = () => require(${dependencyMapName}[1], "do");
|
||||
if (!something) {
|
||||
require(${dependencyMapName}[2], "setup/something");
|
||||
}
|
||||
`);
|
||||
names = ['b/lib/a', 'do', 'setup/something'];
|
||||
});
|
||||
|
||||
it('passes the `dependencyMapName` through', () => {
|
||||
const result = forOptimization(ast, names, dependencyMapName);
|
||||
expect(result.dependencyMapName).toEqual(dependencyMapName);
|
||||
});
|
||||
|
||||
it('returns the list of passed in dependencies', () => {
|
||||
const result = forOptimization(ast, names, dependencyMapName);
|
||||
expect(result.dependencies).toEqual(names);
|
||||
});
|
||||
|
||||
it('only returns dependencies that are in the code', () => {
|
||||
ast = astFromCode(`require(${dependencyMapName}[1], 'do')`);
|
||||
const result = forOptimization(ast, names, dependencyMapName);
|
||||
expect(result.dependencies).toEqual(['do']);
|
||||
});
|
||||
|
||||
it('replaces all call signatures inserted by a prior call to `collectDependencies`', () => {
|
||||
forOptimization(ast, names, dependencyMapName);
|
||||
expect(codeFromAst(ast)).toEqual(comparableCode(`
|
||||
const a = require(${dependencyMapName}[0]);
|
||||
const b = require(123);
|
||||
exports.do = () => require(${dependencyMapName}[1]);
|
||||
if (!something) {
|
||||
require(${dependencyMapName}[2]);
|
||||
}
|
||||
`));
|
||||
});
|
||||
});
|
|
@@ -1,131 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest.disableAutomock();
|
||||
|
||||
const optimizeModule = require('../optimize-module');
|
||||
const transformModule = require('../transform-module');
|
||||
const transformer = require('../../../transformer.js');
|
||||
const {SourceMapConsumer} = require('source-map');
|
||||
const {fn} = require('../../test-helpers');
|
||||
|
||||
const {objectContaining} = jasmine;
|
||||
|
||||
describe('optimizing JS modules', () => {
|
||||
const filename = 'arbitrary/file.js';
|
||||
const optimizationOptions = {
|
||||
dev: false,
|
||||
platform: 'android',
|
||||
postMinifyProcess: x => x,
|
||||
};
|
||||
const originalCode =
|
||||
`if (Platform.OS !== 'android') {
|
||||
require('arbitrary-dev');
|
||||
} else {
|
||||
__DEV__ ? require('arbitrary-android-dev') : require('arbitrary-android-prod');
|
||||
}`;
|
||||
|
||||
let transformResult;
|
||||
beforeAll(done => {
|
||||
transformModule(originalCode, {filename, transformer}, (error, result) => {
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
transformResult = JSON.stringify({type: 'code', details: result.details});
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('copies everything from the transformed file, except for transform results', () => {
|
||||
const result = optimizeModule(transformResult, optimizationOptions);
|
||||
const expected = JSON.parse(transformResult).details;
|
||||
delete expected.transformed;
|
||||
expect(result.type).toBe('code');
|
||||
expect(result.details).toEqual(objectContaining(expected));
|
||||
});
|
||||
|
||||
describe('code optimization', () => {
|
||||
let dependencyMapName, injectedVars, optimized, requireName;
|
||||
beforeAll(() => {
|
||||
const result = optimizeModule(transformResult, optimizationOptions);
|
||||
optimized = result.details.transformed.default;
|
||||
injectedVars = optimized.code.match(/function\(([^)]*)/)[1].split(',');
|
||||
[, requireName,,, dependencyMapName] = injectedVars;
|
||||
});
|
||||
|
||||
it('optimizes code', () => {
|
||||
expect(optimized.code)
|
||||
.toEqual(`__d(function(${injectedVars}){${requireName}(${dependencyMapName}[0])});`);
|
||||
});
|
||||
|
||||
it('extracts dependencies', () => {
|
||||
expect(optimized.dependencies).toEqual(['arbitrary-android-prod']);
|
||||
});
|
||||
|
||||
it('creates source maps', () => {
|
||||
const consumer = new SourceMapConsumer(optimized.map);
|
||||
const column = optimized.code.lastIndexOf(requireName + '(');
|
||||
const loc = findLast(originalCode, 'require');
|
||||
|
||||
expect(consumer.originalPositionFor({line: 1, column}))
|
||||
.toEqual(objectContaining(loc));
|
||||
});
|
||||
|
||||
it('does not extract dependencies for polyfills', () => {
|
||||
const result = optimizeModule(
|
||||
transformResult,
|
||||
{...optimizationOptions, isPolyfill: true},
|
||||
).details;
|
||||
expect(result.transformed.default.dependencies).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('post-processing', () => {
|
||||
let postMinifyProcess, optimize;
|
||||
beforeEach(() => {
|
||||
postMinifyProcess = fn();
|
||||
optimize = () =>
|
||||
optimizeModule(transformResult, {...optimizationOptions, postMinifyProcess});
|
||||
});
|
||||
|
||||
it('passes the result to the provided postprocessing function', () => {
|
||||
postMinifyProcess.stub.callsFake(x => x);
|
||||
const result = optimize();
|
||||
const {code, map} = result.details.transformed.default;
|
||||
expect(postMinifyProcess).toBeCalledWith({code, map});
|
||||
});
|
||||
|
||||
it('uses the result of the provided postprocessing function for the result', () => {
|
||||
const code = 'var postprocessed = "code";';
|
||||
const map = {version: 3, mappings: 'postprocessed'};
|
||||
postMinifyProcess.stub.returns({code, map});
|
||||
expect(optimize().details.transformed.default)
|
||||
.toEqual(objectContaining({code, map}));
|
||||
});
|
||||
});
|
||||
|
||||
it('passes through non-code data unmodified', () => {
|
||||
const data = {type: 'asset', details: {arbitrary: 'data'}};
|
||||
expect(optimizeModule(JSON.stringify(data), {dev: true, platform: ''}))
|
||||
.toEqual(data);
|
||||
});
|
||||
});
|
||||
|
||||
function findLast(code, needle) {
|
||||
const lines = code.split(/(?:(?!.)\s)+/);
|
||||
let line = lines.length;
|
||||
while (line--) {
|
||||
const column = lines[line].lastIndexOf(needle);
|
||||
if (column !== -1) {
|
||||
return {line: line + 1, column};
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
|
@@ -1,277 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @format
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest.disableAutomock();
|
||||
|
||||
const transformModule = require('../transform-module');
|
||||
|
||||
const t = require('babel-types');
|
||||
const {SourceMapConsumer} = require('source-map');
|
||||
const {fn} = require('../../test-helpers');
|
||||
const {parse} = require('babylon');
|
||||
const generate = require('babel-generator').default;
|
||||
const {traverse} = require('babel-core');
|
||||
|
||||
describe('transforming JS modules:', () => {
|
||||
const filename = 'arbitrary';
|
||||
|
||||
let transformer;
|
||||
|
||||
beforeEach(() => {
|
||||
transformer = {
|
||||
transform: fn(),
|
||||
};
|
||||
transformer.transform.stub.returns(transformResult());
|
||||
});
|
||||
|
||||
const {bodyAst, sourceCode, transformedCode} = createTestData();
|
||||
|
||||
const options = variants => ({
|
||||
filename,
|
||||
transformer,
|
||||
variants,
|
||||
});
|
||||
|
||||
const transformResult = (body = bodyAst) => ({
|
||||
ast: t.file(t.program(body)),
|
||||
});
|
||||
|
||||
it('passes through file name and code', done => {
|
||||
transformModule(sourceCode, options(), (error, result) => {
|
||||
expect(result.type).toBe('code');
|
||||
expect(result.details).toEqual(
|
||||
expect.objectContaining({
|
||||
code: sourceCode,
|
||||
file: filename,
|
||||
}),
|
||||
);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('exposes a haste ID if present', done => {
|
||||
const hasteID = 'TheModule';
|
||||
const codeWithHasteID = `/** @providesModule ${hasteID} */`;
|
||||
transformModule(codeWithHasteID, options(), (error, result) => {
|
||||
expect(result.type).toBe('code');
|
||||
expect(result.details).toEqual(expect.objectContaining({hasteID}));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('sets `type` to `"module"` by default', done => {
|
||||
transformModule(sourceCode, options(), (error, result) => {
|
||||
expect(result.type).toBe('code');
|
||||
expect(result.details).toEqual(expect.objectContaining({type: 'module'}));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('sets `type` to `"script"` if the input is a polyfill', done => {
|
||||
transformModule(
|
||||
sourceCode,
|
||||
{...options(), polyfill: true},
|
||||
(error, result) => {
|
||||
expect(result.type).toBe('code');
|
||||
expect(result.details).toEqual(
|
||||
expect.objectContaining({type: 'script'}),
|
||||
);
|
||||
done();
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
const defaults = {
|
||||
dev: false,
|
||||
generateSourceMaps: true,
|
||||
hot: false,
|
||||
inlineRequires: false,
|
||||
platform: '',
|
||||
projectRoot: '',
|
||||
};
|
||||
|
||||
it(
|
||||
'calls the passed-in transform function with code, file name, and options ' +
|
||||
'for all passed in variants',
|
||||
done => {
|
||||
const variants = {dev: {dev: true}, prod: {dev: false}};
|
||||
|
||||
transformModule(sourceCode, options(variants), () => {
|
||||
expect(transformer.transform).toBeCalledWith({
|
||||
filename,
|
||||
localPath: filename,
|
||||
options: {...defaults, ...variants.dev},
|
||||
src: sourceCode,
|
||||
});
|
||||
expect(transformer.transform).toBeCalledWith({
|
||||
filename,
|
||||
localPath: filename,
|
||||
options: {...defaults, ...variants.prod},
|
||||
src: sourceCode,
|
||||
});
|
||||
done();
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
it('calls back with any error yielded by the transform function', done => {
|
||||
const error = new Error();
|
||||
transformer.transform.stub.throws(error);
|
||||
|
||||
transformModule(sourceCode, options(), e => {
|
||||
expect(e).toBe(error);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('wraps the code produced by the transform function into a module factory', done => {
|
||||
transformModule(sourceCode, options(), (error, result) => {
|
||||
expect(error).toEqual(null);
|
||||
|
||||
const {code, dependencyMapName} = result.details.transformed.default;
|
||||
expect(code.replace(/\s+/g, '')).toEqual(
|
||||
`__d(function(global,require,module,exports,${dependencyMapName}){${transformedCode}});`,
|
||||
);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('wraps the code produced by the transform function into an IIFE for polyfills', done => {
|
||||
transformModule(
|
||||
sourceCode,
|
||||
{...options(), polyfill: true},
|
||||
(error, result) => {
|
||||
expect(error).toEqual(null);
|
||||
|
||||
const {code} = result.details.transformed.default;
|
||||
expect(code.replace(/\s+/g, '')).toEqual(
|
||||
`(function(global){${transformedCode}})(this);`,
|
||||
);
|
||||
done();
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
it('creates source maps', done => {
|
||||
transformModule(sourceCode, options(), (error, result) => {
|
||||
const {code, map} = result.details.transformed.default;
|
||||
const column = code.indexOf('code');
|
||||
const consumer = new SourceMapConsumer(map);
|
||||
expect(consumer.originalPositionFor({line: 1, column})).toEqual(
|
||||
expect.objectContaining({line: 1, column: sourceCode.indexOf('code')}),
|
||||
);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('extracts dependencies (require calls)', done => {
|
||||
const dep1 = 'foo';
|
||||
const dep2 = 'bar';
|
||||
const code = `require('${dep1}'),require('${dep2}')`;
|
||||
const {body} = parse(code).program;
|
||||
transformer.transform.stub.returns(transformResult(body));
|
||||
|
||||
transformModule(code, options(), (error, result) => {
|
||||
expect(result.details.transformed.default).toEqual(
|
||||
expect.objectContaining({dependencies: [dep1, dep2]}),
|
||||
);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('transforms for all variants', done => {
|
||||
const variants = {dev: {dev: true}, prod: {dev: false}};
|
||||
transformer.transform.stub
|
||||
.withArgs(filename, sourceCode, variants.dev)
|
||||
.returns(transformResult(bodyAst))
|
||||
.withArgs(filename, sourceCode, variants.prod)
|
||||
.returns(transformResult([]));
|
||||
|
||||
transformModule(sourceCode, options(variants), (error, result) => {
|
||||
const {dev, prod} = result.details.transformed;
|
||||
expect(dev.code.replace(/\s+/g, '')).toEqual(
|
||||
`__d(function(global,require,module,exports,${dev.dependencyMapName}){arbitrary(code);});`,
|
||||
);
|
||||
expect(prod.code.replace(/\s+/g, '')).toEqual(
|
||||
`__d(function(global,require,module,exports,${prod.dependencyMapName}){arbitrary(code);});`,
|
||||
);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('prefixes JSON files with `module.exports = `', done => {
|
||||
const json = '{"foo":"bar"}';
|
||||
|
||||
transformModule(
|
||||
json,
|
||||
{...options(), filename: 'some.json'},
|
||||
(error, result) => {
|
||||
const {code} = result.details.transformed.default;
|
||||
expect(code.replace(/\s+/g, '')).toEqual(
|
||||
'__d(function(global,require,module,exports){' +
|
||||
`module.exports=${json}});`,
|
||||
);
|
||||
done();
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
it('does not create source maps for JSON files', done => {
|
||||
transformModule(
|
||||
'{}',
|
||||
{...options(), filename: 'some.json'},
|
||||
(error, result) => {
|
||||
expect(result.details.transformed.default).toEqual(
|
||||
expect.objectContaining({map: null}),
|
||||
);
|
||||
done();
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
it('adds package data for `package.json` files', done => {
|
||||
const pkg = {
|
||||
name: 'package-name',
|
||||
main: 'package/main',
|
||||
browser: {browser: 'defs'},
|
||||
'react-native': {'react-native': 'defs'},
|
||||
};
|
||||
|
||||
transformModule(
|
||||
JSON.stringify(pkg),
|
||||
{...options(), filename: 'arbitrary/package.json'},
|
||||
(error, result) => {
|
||||
expect(result.details.package).toEqual(pkg);
|
||||
done();
|
||||
},
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
function createTestData() {
|
||||
// creates test data with a transformed AST, so that we can test source
|
||||
// map generation.
|
||||
const sourceCode = 'some(arbitrary(code));';
|
||||
const fileAst = parse(sourceCode);
|
||||
traverse(fileAst, {
|
||||
CallExpression(path) {
|
||||
if (path.node.callee.name === 'some') {
|
||||
path.replaceWith(path.node.arguments[0]);
|
||||
}
|
||||
},
|
||||
});
|
||||
return {
|
||||
bodyAst: fileAst.program.body,
|
||||
sourceCode,
|
||||
transformedCode: generate(fileAst).code,
|
||||
};
|
||||
}
|
|
@ -1,89 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest
|
||||
.disableAutomock()
|
||||
.setMock('fs', jest.genMockFromModule('fs'))
|
||||
.mock('mkdirp');
|
||||
|
||||
const wrapWorkerFn = require('../wrap-worker-fn');
|
||||
const {dirname} = require('path');
|
||||
const {fn} = require('../../test-helpers');
|
||||
|
||||
const {any} = jasmine;
|
||||
|
||||
describe('wrapWorkerFn:', () => {
|
||||
const infile = '/arbitrary/in/file';
|
||||
const outfile = '/arbitrary/out/file';
|
||||
|
||||
let workerFn, wrapped;
|
||||
beforeEach(() => {
|
||||
workerFn = fn();
|
||||
workerFn.stub.yields();
|
||||
wrapped = wrapWorkerFn(workerFn);
|
||||
});
|
||||
|
||||
const fs = require('fs');
|
||||
const mkdirp = require('mkdirp');
|
||||
|
||||
it('reads the passed-in file synchronously as buffer', done => {
|
||||
wrapped(infile, outfile, {}, () => {
|
||||
expect(fs.readFileSync).toBeCalledWith(infile);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('calls the worker function with file contents and options', done => {
|
||||
const contents = 'arbitrary(contents);';
|
||||
const options = {arbitrary: 'options'};
|
||||
fs.readFileSync.mockReturnValue(contents);
|
||||
wrapped(infile, outfile, options, () => {
|
||||
expect(workerFn).toBeCalledWith(contents, options, any(Function));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('passes through any error that the worker function calls back with', done => {
|
||||
const error = new Error();
|
||||
workerFn.stub.yields(error);
|
||||
wrapped(infile, outfile, {}, e => {
|
||||
expect(e).toBe(error);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('writes the result to disk', done => {
|
||||
const result = {arbitrary: 'result'};
|
||||
workerFn.stub.yields(null, result);
|
||||
wrapped(infile, outfile, {}, () => {
|
||||
expect(mkdirp.sync).toBeCalledWith(dirname(outfile));
|
||||
expect(fs.writeFileSync).toBeCalledWith(outfile, JSON.stringify(result), 'utf8');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('calls back with any error thrown by `mkdirp.sync`', done => {
|
||||
const error = new Error();
|
||||
mkdirp.sync.mockImplementationOnce(() => { throw error; });
|
||||
wrapped(infile, outfile, {}, e => {
|
||||
expect(e).toBe(error);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('calls back with any error thrown by `fs.writeFileSync`', done => {
|
||||
const error = new Error();
|
||||
fs.writeFileSync.mockImplementationOnce(() => { throw error; });
|
||||
wrapped(infile, outfile, {}, e => {
|
||||
expect(e).toBe(error);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,150 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const nullthrows = require('fbjs/lib/nullthrows');
|
||||
|
||||
const {traverse, types} = require('babel-core');
|
||||
|
||||
type AST = Object;
|
||||
|
||||
class Replacement {
|
||||
nameToIndex: Map<string, number>;
|
||||
nextIndex: number;
|
||||
|
||||
constructor() {
|
||||
this.nameToIndex = new Map();
|
||||
this.nextIndex = 0;
|
||||
}
|
||||
|
||||
isRequireCall(callee, firstArg) {
|
||||
return (
|
||||
callee.type === 'Identifier' && callee.name === 'require' &&
|
||||
firstArg && isLiteralString(firstArg)
|
||||
);
|
||||
}
|
||||
|
||||
getIndex(stringLiteralOrTemplateLiteral) {
|
||||
const name = stringLiteralOrTemplateLiteral.quasis
|
||||
? stringLiteralOrTemplateLiteral.quasis[0].value.cooked
|
||||
: stringLiteralOrTemplateLiteral.value;
|
||||
let index = this.nameToIndex.get(name);
|
||||
if (index !== undefined) {
|
||||
return index;
|
||||
}
|
||||
index = this.nextIndex++;
|
||||
this.nameToIndex.set(name, index);
|
||||
return index;
|
||||
}
|
||||
|
||||
getNames() {
|
||||
return Array.from(this.nameToIndex.keys());
|
||||
}
|
||||
|
||||
makeArgs(newId, oldId, dependencyMapIdentifier) {
|
||||
const mapLookup = createMapLookup(dependencyMapIdentifier, newId);
|
||||
return [mapLookup, oldId];
|
||||
}
|
||||
}
|
||||
|
||||
class ProdReplacement {
|
||||
replacement: Replacement;
|
||||
names: Array<string>;
|
||||
|
||||
constructor(names) {
|
||||
this.replacement = new Replacement();
|
||||
this.names = names;
|
||||
}
|
||||
|
||||
isRequireCall(callee, firstArg) {
|
||||
return (
|
||||
callee.type === 'Identifier' &&
|
||||
callee.name === 'require' &&
|
||||
firstArg &&
|
||||
firstArg.type === 'MemberExpression' &&
|
||||
firstArg.property &&
|
||||
firstArg.property.type === 'NumericLiteral'
|
||||
);
|
||||
}
|
||||
|
||||
getIndex(memberExpression) {
|
||||
const id = memberExpression.property.value;
|
||||
if (id in this.names) {
|
||||
return this.replacement.getIndex({value: this.names[id]});
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
`${id} is not a known module ID. Existing mappings: ${
|
||||
this.names.map((n, i) => `${i} => ${n}`).join(', ')}`
|
||||
);
|
||||
}
|
||||
|
||||
getNames() {
|
||||
return this.replacement.getNames();
|
||||
}
|
||||
|
||||
makeArgs(newId, _, dependencyMapIdentifier) {
|
||||
const mapLookup = createMapLookup(dependencyMapIdentifier, newId);
|
||||
return [mapLookup];
|
||||
}
|
||||
}
|
||||
|
||||
function createMapLookup(dependencyMapIdentifier, propertyIdentifier) {
|
||||
return types.memberExpression(
|
||||
dependencyMapIdentifier,
|
||||
propertyIdentifier,
|
||||
true,
|
||||
);
|
||||
}
|
||||
|
||||
function collectDependencies(ast, replacement, dependencyMapIdentifier) {
|
||||
const traversalState = {dependencyMapIdentifier};
|
||||
traverse(ast, {
|
||||
Program(path, state) {
|
||||
if (!state.dependencyMapIdentifier) {
|
||||
state.dependencyMapIdentifier =
|
||||
path.scope.generateUidIdentifier('dependencyMap');
|
||||
}
|
||||
},
|
||||
CallExpression(path, state) {
|
||||
const node = path.node;
|
||||
const arg = node.arguments[0];
|
||||
if (replacement.isRequireCall(node.callee, arg)) {
|
||||
const index = replacement.getIndex(arg);
|
||||
node.arguments = replacement.makeArgs(
|
||||
types.numericLiteral(index),
|
||||
arg,
|
||||
state.dependencyMapIdentifier,
|
||||
);
|
||||
}
|
||||
},
|
||||
}, null, traversalState);
|
||||
|
||||
return {
|
||||
dependencies: replacement.getNames(),
|
||||
dependencyMapName: nullthrows(traversalState.dependencyMapIdentifier).name,
|
||||
};
|
||||
}
|
||||
|
||||
function isLiteralString(node) {
|
||||
return node.type === 'StringLiteral' ||
|
||||
node.type === 'TemplateLiteral' && node.quasis.length === 1;
|
||||
}
|
||||
|
||||
exports = module.exports =
|
||||
(ast: AST) => collectDependencies(ast, new Replacement());
|
||||
exports.forOptimization =
|
||||
(ast: AST, names: Array<string>, dependencyMapName?: string) =>
|
||||
collectDependencies(
|
||||
ast,
|
||||
new ProdReplacement(names),
|
||||
dependencyMapName ? types.identifier(dependencyMapName) : undefined,
|
||||
);
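For reference, a minimal sketch of the rewriting the removed collect-dependencies module performs, assuming babylon and babel-generator as used in the file above; the require path and the sample module names are illustrative, not taken from this diff:

const {parse} = require('babylon');
const generate = require('babel-generator').default;
const collectDependencies = require('./collect-dependencies'); // hypothetical local path

const ast = parse("require('left-pad'); require('left-pad'); require('lodash');");
const {dependencies, dependencyMapName} = collectDependencies(ast);
// dependencies      -> ['left-pad', 'lodash']  (duplicate names share one index)
// dependencyMapName -> '_dependencyMap'        (a generated uid)
// generate(ast).code now contains calls of the form
//   require(_dependencyMap[0], 'left-pad'); require(_dependencyMap[1], 'lodash');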
|
|
@ -1,26 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const babelGenerate = require('babel-generator').default;
|
||||
|
||||
function generate(ast: Object, filename: string, sourceCode: string) {
|
||||
return babelGenerate(ast, {
|
||||
comments: false,
|
||||
compact: true,
|
||||
filename,
|
||||
sourceFileName: filename,
|
||||
sourceMaps: true,
|
||||
sourceMapTarget: filename,
|
||||
}, sourceCode);
|
||||
}
|
||||
|
||||
module.exports = generate;
|
|
@ -1,125 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const babel = require('babel-core');
|
||||
const collectDependencies = require('./collect-dependencies');
|
||||
const constantFolding = require('../../JSTransformer/worker/constant-folding').plugin;
|
||||
const generate = require('./generate');
|
||||
const inline = require('../../JSTransformer/worker/inline').plugin;
|
||||
const minify = require('../../JSTransformer/worker/minify');
|
||||
const sourceMap = require('source-map');
|
||||
|
||||
import type {TransformedSourceFile, TransformResult} from '../types.flow';
|
||||
import type {MappingsMap, SourceMap} from '../../lib/SourceMap';
|
||||
import type {PostMinifyProcess} from '../../Bundler/index.js';
|
||||
|
||||
|
||||
export type OptimizationOptions = {|
|
||||
dev: boolean,
|
||||
isPolyfill?: boolean,
|
||||
platform: string,
|
||||
postMinifyProcess: PostMinifyProcess,
|
||||
|};
|
||||
|
||||
function optimizeModule(
|
||||
content: Buffer,
|
||||
optimizationOptions: OptimizationOptions,
|
||||
): TransformedSourceFile {
|
||||
const data: TransformedSourceFile = JSON.parse(content.toString('utf8'));
|
||||
|
||||
if (data.type !== 'code') {
|
||||
return data;
|
||||
}
|
||||
|
||||
const {details} = data;
|
||||
const {code, file, transformed} = details;
|
||||
const result = {...details, transformed: {}};
|
||||
const {postMinifyProcess} = optimizationOptions;
|
||||
|
||||
//$FlowIssue #14545724
|
||||
Object.entries(transformed).forEach(([k, t]: [*, TransformResult]) => {
|
||||
const optimized = optimize(t, file, code, optimizationOptions);
|
||||
const processed = postMinifyProcess({code: optimized.code, map: optimized.map});
|
||||
optimized.code = processed.code;
|
||||
optimized.map = processed.map;
|
||||
result.transformed[k] = optimized;
|
||||
});
|
||||
|
||||
return {type: 'code', details: result};
|
||||
}
|
||||
|
||||
function optimize(transformed, file, originalCode, options) {
|
||||
const {code, dependencyMapName, map} = transformed;
|
||||
const optimized = optimizeCode(code, map, file, options);
|
||||
|
||||
let dependencies;
|
||||
if (options.isPolyfill) {
|
||||
dependencies = [];
|
||||
} else {
|
||||
({dependencies} = collectDependencies.forOptimization(
|
||||
optimized.ast,
|
||||
transformed.dependencies,
|
||||
dependencyMapName,
|
||||
));
|
||||
}
|
||||
|
||||
const inputMap = transformed.map;
|
||||
const gen = generate(optimized.ast, file, originalCode);
|
||||
|
||||
const min = minify(
|
||||
file,
|
||||
gen.code,
|
||||
inputMap && mergeSourceMaps(file, inputMap, gen.map),
|
||||
);
|
||||
return {code: min.code, map: min.map, dependencies};
|
||||
}
|
||||
|
||||
function optimizeCode(code, map, filename, inliningOptions) {
|
||||
return babel.transform(code, {
|
||||
plugins: [
|
||||
[constantFolding],
|
||||
[inline, {...inliningOptions, isWrapped: true}],
|
||||
],
|
||||
babelrc: false,
|
||||
code: false,
|
||||
filename,
|
||||
});
|
||||
}
|
||||
|
||||
function mergeSourceMaps(
|
||||
file: string,
|
||||
originalMap: SourceMap,
|
||||
secondMap: SourceMap,
|
||||
): MappingsMap {
|
||||
const merged = new sourceMap.SourceMapGenerator();
|
||||
const inputMap = new sourceMap.SourceMapConsumer(originalMap);
|
||||
new sourceMap.SourceMapConsumer(secondMap)
|
||||
.eachMapping(mapping => {
|
||||
const original = inputMap.originalPositionFor({
|
||||
line: mapping.originalLine,
|
||||
column: mapping.originalColumn,
|
||||
});
|
||||
if (original.line == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
merged.addMapping({
|
||||
generated: {line: mapping.generatedLine, column: mapping.generatedColumn},
|
||||
original: {line: original.line, column: original.column || 0},
|
||||
source: file,
|
||||
name: original.name || mapping.name,
|
||||
});
|
||||
});
|
||||
return merged.toJSON();
|
||||
}
|
||||
|
||||
module.exports = optimizeModule;
|
|
@ -1,183 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
* @format
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const JsFileWrapping = require('./JsFileWrapping');
|
||||
|
||||
const asyncify = require('async/asyncify');
|
||||
const collectDependencies = require('./collect-dependencies');
|
||||
const defaults = require('../../defaults');
|
||||
const docblock = require('../../node-haste/DependencyGraph/docblock');
|
||||
const generate = require('./generate');
|
||||
const path = require('path');
|
||||
const series = require('async/series');
|
||||
|
||||
const {basename} = require('path');
|
||||
|
||||
import type {
|
||||
Callback,
|
||||
TransformedCodeFile,
|
||||
TransformedSourceFile,
|
||||
Transformer,
|
||||
TransformerResult,
|
||||
TransformResult,
|
||||
TransformVariants,
|
||||
} from '../types.flow';
|
||||
|
||||
export type TransformOptions = {|
|
||||
filename: string,
|
||||
polyfill?: boolean,
|
||||
transformer: Transformer<*>,
|
||||
variants?: TransformVariants,
|
||||
|};
|
||||
|
||||
const defaultTransformOptions = {
|
||||
dev: true,
|
||||
generateSourceMaps: true,
|
||||
hot: false,
|
||||
inlineRequires: false,
|
||||
platform: '',
|
||||
projectRoot: '',
|
||||
};
|
||||
const defaultVariants = {default: {}};
|
||||
|
||||
const ASSET_EXTENSIONS = new Set(defaults.assetExts);
|
||||
|
||||
function transformModule(
|
||||
content: Buffer,
|
||||
options: TransformOptions,
|
||||
callback: Callback<TransformedSourceFile>,
|
||||
): void {
|
||||
if (ASSET_EXTENSIONS.has(path.extname(options.filename).substr(1))) {
|
||||
transformAsset(content, options, callback);
|
||||
return;
|
||||
}
|
||||
|
||||
const code = content.toString('utf8');
|
||||
if (options.filename.endsWith('.json')) {
|
||||
transformJSON(code, options, callback);
|
||||
return;
|
||||
}
|
||||
|
||||
const {filename, transformer, variants = defaultVariants} = options;
|
||||
const tasks = {};
|
||||
Object.keys(variants).forEach(name => {
|
||||
tasks[name] = asyncify(() =>
|
||||
transformer.transform({
|
||||
filename,
|
||||
localPath: filename,
|
||||
options: {...defaultTransformOptions, ...variants[name]},
|
||||
src: code,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
series(tasks, (error, results: {[key: string]: TransformerResult}) => {
|
||||
if (error) {
|
||||
callback(error);
|
||||
return;
|
||||
}
|
||||
|
||||
const transformed: {[key: string]: TransformResult} = {};
|
||||
|
||||
//$FlowIssue #14545724
|
||||
Object.entries(results).forEach(([key, value]: [*, TransformFnResult]) => {
|
||||
transformed[key] = makeResult(
|
||||
value.ast,
|
||||
filename,
|
||||
code,
|
||||
options.polyfill,
|
||||
);
|
||||
});
|
||||
|
||||
const annotations = docblock.parseAsObject(docblock.extract(code));
|
||||
|
||||
callback(null, {
|
||||
type: 'code',
|
||||
details: {
|
||||
assetContent: null,
|
||||
code,
|
||||
file: filename,
|
||||
hasteID: annotations.providesModule || null,
|
||||
transformed,
|
||||
type: options.polyfill ? 'script' : 'module',
|
||||
},
|
||||
});
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
function transformJSON(json, options, callback) {
|
||||
const value = JSON.parse(json);
|
||||
const {filename} = options;
|
||||
const code = `__d(function(${JsFileWrapping.MODULE_FACTORY_PARAMETERS.join(', ')}) { module.exports = \n${json}\n});`;
|
||||
|
||||
const moduleData = {
|
||||
code,
|
||||
map: null, // no source map for JSON files!
|
||||
dependencies: [],
|
||||
};
|
||||
const transformed = {};
|
||||
|
||||
Object.keys(options.variants || defaultVariants).forEach(
|
||||
key => (transformed[key] = moduleData),
|
||||
);
|
||||
|
||||
const result: TransformedCodeFile = {
|
||||
assetContent: null,
|
||||
code: json,
|
||||
file: filename,
|
||||
hasteID: value.name,
|
||||
transformed,
|
||||
type: 'module',
|
||||
};
|
||||
|
||||
if (basename(filename) === 'package.json') {
|
||||
result.package = {
|
||||
name: value.name,
|
||||
main: value.main,
|
||||
browser: value.browser,
|
||||
'react-native': value['react-native'],
|
||||
};
|
||||
}
|
||||
callback(null, {type: 'code', details: result});
|
||||
}
|
||||
|
||||
function transformAsset(
|
||||
content: Buffer,
|
||||
options: TransformOptions,
|
||||
callback: Callback<TransformedSourceFile>,
|
||||
) {
|
||||
callback(null, {
|
||||
details: {
|
||||
assetContentBase64: content.toString('base64'),
|
||||
filePath: options.filename,
|
||||
},
|
||||
type: 'asset',
|
||||
});
|
||||
}
|
||||
|
||||
function makeResult(ast, filename, sourceCode, isPolyfill = false) {
|
||||
let dependencies, dependencyMapName, file;
|
||||
if (isPolyfill) {
|
||||
dependencies = [];
|
||||
file = JsFileWrapping.wrapPolyfill(ast);
|
||||
} else {
|
||||
({dependencies, dependencyMapName} = collectDependencies(ast));
|
||||
file = JsFileWrapping.wrapModule(ast, dependencyMapName);
|
||||
}
|
||||
|
||||
const gen = generate(file, filename, sourceCode);
|
||||
return {code: gen.code, map: gen.map, dependencies, dependencyMapName};
|
||||
}
|
||||
|
||||
module.exports = transformModule;
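For reference, a minimal driver for the removed transform-module worker, using a pass-through babel transformer instead of metro's real one; the file name, source string, and require path are illustrative:

const {transform} = require('babel-core');
const transformModule = require('./transform-module'); // hypothetical local path

// Parse-only transformer: no plugins, just returns the AST.
const transformer = {
  transform({src, filename}) {
    return {ast: transform(src, {filename, babelrc: false, code: false}).ast};
  },
};

transformModule(
  Buffer.from('const answer = 42;'),
  {filename: 'answer.js', transformer},
  (error, result) => {
    if (error) throw error;
    // result.type === 'code'; result.details.transformed.default.code is the source
    // wrapped as __d(function(global, require, module, exports, _dependencyMap) { ... });
  },
);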
|
|
@ -1,62 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2016-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const mkdirp = require('mkdirp');
|
||||
|
||||
const {dirname} = require('path');
|
||||
|
||||
import type {Callback} from '../types.flow';
|
||||
|
||||
type Path = string;
|
||||
type WorkerFn<Options> = (
|
||||
fileContents: Buffer,
|
||||
options: Options,
|
||||
callback: Callback<Object>,
|
||||
) => void;
|
||||
export type WorkerFnWithIO<Options> = (
|
||||
infile: Path,
|
||||
outfile: Path,
|
||||
options: Options,
|
||||
callback: Callback<>,
|
||||
) => void;
|
||||
|
||||
function wrapWorkerFn<Options>(
|
||||
workerFunction: WorkerFn<Options>,
|
||||
): WorkerFnWithIO<Options> {
|
||||
return (
|
||||
infile: Path,
|
||||
outfile: Path,
|
||||
options: Options,
|
||||
callback: Callback<>,
|
||||
) => {
|
||||
const contents = fs.readFileSync(infile);
|
||||
workerFunction(contents, options, (error, result) => {
|
||||
if (error) {
|
||||
callback(error);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
mkdirp.sync(dirname(outfile));
|
||||
fs.writeFileSync(outfile, JSON.stringify(result), 'utf8');
|
||||
} catch (writeError) {
|
||||
callback(writeError);
|
||||
return;
|
||||
}
|
||||
|
||||
callback(null);
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = wrapWorkerFn;
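For reference, a sketch of how a worker was exposed through the removed wrapWorkerFn; the paths and the toy worker are illustrative:

const wrapWorkerFn = require('./wrap-worker-fn'); // hypothetical local path

// Toy worker: gets the input file contents as a Buffer plus options, and calls
// back with a JSON-serializable result.
function upperCaseWorker(fileContents, options, callback) {
  callback(null, {text: fileContents.toString('utf8').toUpperCase()});
}

const worker = wrapWorkerFn(upperCaseWorker);

// Reads /tmp/in.txt synchronously, runs the worker, creates /tmp/out if missing,
// and writes JSON.stringify(result) to /tmp/out/result.json.
worker('/tmp/in.txt', '/tmp/out/result.json', {}, error => {
  if (error) throw error;
});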
|
|
@ -1,553 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest.useRealTimers();
|
||||
|
||||
jest.unmock('../');
|
||||
jest.unmock('../../defaults');
|
||||
jest.mock('path');
|
||||
|
||||
const {join: pathJoin} = require.requireActual('path');
|
||||
const DependencyGraph = jest.fn();
|
||||
jest.setMock('../../node-haste/DependencyGraph', DependencyGraph);
|
||||
let Module;
|
||||
let Polyfill;
|
||||
|
||||
describe('Resolver', function() {
|
||||
let Resolver, path;
|
||||
|
||||
beforeEach(function() {
|
||||
Resolver = require('../');
|
||||
path = require('path');
|
||||
DependencyGraph.mockClear();
|
||||
Module = jest.fn(function() {
|
||||
this.getName = jest.fn();
|
||||
this.getDependencies = jest.fn();
|
||||
this.isPolyfill = jest.fn().mockReturnValue(false);
|
||||
this.isJSON = jest.fn().mockReturnValue(false);
|
||||
});
|
||||
Polyfill = jest.fn(function() {
|
||||
var polyfill = new Module();
|
||||
polyfill.isPolyfill.mockReturnValue(true);
|
||||
return polyfill;
|
||||
});
|
||||
|
||||
DependencyGraph.load = jest.fn().mockImplementation(
|
||||
opts => Promise.resolve(new DependencyGraph(opts)),
|
||||
);
|
||||
DependencyGraph.prototype.createPolyfill = jest.fn();
|
||||
DependencyGraph.prototype.getDependencies = jest.fn();
|
||||
|
||||
// For the polyfillDeps
|
||||
path.join = jest.fn((a, b) => b);
|
||||
|
||||
DependencyGraph.prototype.load = jest.fn(() => Promise.resolve());
|
||||
});
|
||||
|
||||
class ResolutionResponseMock {
|
||||
constructor({dependencies, mainModuleId}) {
|
||||
this.dependencies = dependencies;
|
||||
this.mainModuleId = mainModuleId;
|
||||
this.getModuleId = createGetModuleId();
|
||||
}
|
||||
|
||||
prependDependency(dependency) {
|
||||
this.dependencies.unshift(dependency);
|
||||
}
|
||||
|
||||
finalize() {
|
||||
return Promise.resolve(this);
|
||||
}
|
||||
|
||||
getResolvedDependencyPairs() {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
function createModule(id, dependencies) {
|
||||
var module = new Module({});
|
||||
module.path = id;
|
||||
module.getName.mockImplementation(() => Promise.resolve(id));
|
||||
module.getDependencies.mockImplementation(() => Promise.resolve(dependencies));
|
||||
return module;
|
||||
}
|
||||
|
||||
function createJsonModule(id) {
|
||||
const module = createModule(id, []);
|
||||
module.isJSON.mockReturnValue(true);
|
||||
return module;
|
||||
}
|
||||
|
||||
function createPolyfill(id, dependencies) {
|
||||
var polyfill = new Polyfill({});
|
||||
polyfill.getName = jest.fn(() => Promise.resolve(id));
|
||||
polyfill.getDependencies =
|
||||
jest.fn(() => Promise.resolve(dependencies));
|
||||
return polyfill;
|
||||
}
|
||||
|
||||
describe('getDependencies', function() {
|
||||
it('forwards transform options to the dependency graph', function() {
|
||||
expect.assertions(1);
|
||||
const transformOptions = {arbitrary: 'options'};
|
||||
const platform = 'ios';
|
||||
const entry = '/root/index.js';
|
||||
|
||||
DependencyGraph.prototype.getDependencies.mockImplementation(
|
||||
() => Promise.reject());
|
||||
return Resolver.load({projectRoot: '/root'})
|
||||
.then(r => r.getDependencies(entry, {platform}, transformOptions))
|
||||
.catch(() => {
|
||||
expect(DependencyGraph.prototype.getDependencies).toBeCalledWith({
|
||||
entryPath: entry,
|
||||
platform,
|
||||
options: transformOptions,
|
||||
recursive: true,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('passes custom platforms to the dependency graph', function() {
|
||||
expect.assertions(1);
|
||||
return Resolver.load({ // eslint-disable-line no-new
|
||||
projectRoot: '/root',
|
||||
platforms: ['ios', 'windows', 'vr'],
|
||||
}).then(() => {
|
||||
const platforms = DependencyGraph.mock.calls[0][0].platforms;
|
||||
expect(Array.from(platforms)).toEqual(['ios', 'windows', 'vr']);
|
||||
});
|
||||
});
|
||||
|
||||
it('should get dependencies with polyfills', function() {
|
||||
expect.assertions(5);
|
||||
|
||||
var module = createModule('index');
|
||||
var deps = [module];
|
||||
|
||||
var depResolverPromise = Resolver.load({
|
||||
projectRoot: '/root',
|
||||
});
|
||||
|
||||
DependencyGraph.prototype.getDependencies.mockImplementation(function() {
|
||||
return Promise.resolve(new ResolutionResponseMock({
|
||||
dependencies: deps,
|
||||
mainModuleId: 'index',
|
||||
}));
|
||||
});
|
||||
|
||||
const polyfill = {
|
||||
id: 'polyfills/Object.es6.js',
|
||||
file: 'polyfills/Object.es6.js',
|
||||
dependencies: [],
|
||||
};
|
||||
DependencyGraph.prototype.createPolyfill.mockReturnValueOnce(polyfill);
|
||||
|
||||
return depResolverPromise
|
||||
.then(r => r.getDependencies(
|
||||
'/root/index.js',
|
||||
{dev: false},
|
||||
undefined,
|
||||
undefined,
|
||||
createGetModuleId()
|
||||
)).then(function(result) {
|
||||
expect(result.mainModuleId).toEqual('index');
|
||||
expect(result.dependencies[result.dependencies.length - 1]).toBe(module);
|
||||
|
||||
expect(DependencyGraph.mock.instances[0].getDependencies)
|
||||
.toBeCalledWith({entryPath: '/root/index.js', recursive: true});
|
||||
expect(result.dependencies[0]).toEqual(polyfill);
|
||||
|
||||
expect(
|
||||
DependencyGraph
|
||||
.prototype
|
||||
.createPolyfill
|
||||
.mock
|
||||
.calls
|
||||
.map(call => call[0]))
|
||||
.toEqual([
|
||||
{id: 'polyfills/Object.es6.js',
|
||||
file: 'polyfills/Object.es6.js',
|
||||
dependencies: [],
|
||||
},
|
||||
{id: 'polyfills/console.js',
|
||||
file: 'polyfills/console.js',
|
||||
dependencies: [
|
||||
'polyfills/Object.es6.js',
|
||||
],
|
||||
},
|
||||
{id: 'polyfills/error-guard.js',
|
||||
file: 'polyfills/error-guard.js',
|
||||
dependencies: [
|
||||
'polyfills/Object.es6.js',
|
||||
'polyfills/console.js',
|
||||
],
|
||||
},
|
||||
{id: 'polyfills/Number.es6.js',
|
||||
file: 'polyfills/Number.es6.js',
|
||||
dependencies: [
|
||||
'polyfills/Object.es6.js',
|
||||
'polyfills/console.js',
|
||||
'polyfills/error-guard.js',
|
||||
],
|
||||
},
|
||||
{id: 'polyfills/String.prototype.es6.js',
|
||||
file: 'polyfills/String.prototype.es6.js',
|
||||
dependencies: [
|
||||
'polyfills/Object.es6.js',
|
||||
'polyfills/console.js',
|
||||
'polyfills/error-guard.js',
|
||||
'polyfills/Number.es6.js',
|
||||
],
|
||||
},
|
||||
{id: 'polyfills/Array.prototype.es6.js',
|
||||
file: 'polyfills/Array.prototype.es6.js',
|
||||
dependencies: [
|
||||
'polyfills/Object.es6.js',
|
||||
'polyfills/console.js',
|
||||
'polyfills/error-guard.js',
|
||||
'polyfills/Number.es6.js',
|
||||
'polyfills/String.prototype.es6.js',
|
||||
],
|
||||
},
|
||||
{id: 'polyfills/Array.es6.js',
|
||||
file: 'polyfills/Array.es6.js',
|
||||
dependencies: [
|
||||
'polyfills/Object.es6.js',
|
||||
'polyfills/console.js',
|
||||
'polyfills/error-guard.js',
|
||||
'polyfills/Number.es6.js',
|
||||
'polyfills/String.prototype.es6.js',
|
||||
'polyfills/Array.prototype.es6.js',
|
||||
],
|
||||
},
|
||||
{id: 'polyfills/Object.es7.js',
|
||||
file: 'polyfills/Object.es7.js',
|
||||
dependencies: [
|
||||
'polyfills/Object.es6.js',
|
||||
'polyfills/console.js',
|
||||
'polyfills/error-guard.js',
|
||||
'polyfills/Number.es6.js',
|
||||
'polyfills/String.prototype.es6.js',
|
||||
'polyfills/Array.prototype.es6.js',
|
||||
'polyfills/Array.es6.js',
|
||||
],
|
||||
},
|
||||
{id: 'polyfills/babelHelpers.js',
|
||||
file: 'polyfills/babelHelpers.js',
|
||||
dependencies: [
|
||||
'polyfills/Object.es6.js',
|
||||
'polyfills/console.js',
|
||||
'polyfills/error-guard.js',
|
||||
'polyfills/Number.es6.js',
|
||||
'polyfills/String.prototype.es6.js',
|
||||
'polyfills/Array.prototype.es6.js',
|
||||
'polyfills/Array.es6.js',
|
||||
'polyfills/Object.es7.js',
|
||||
],
|
||||
},
|
||||
].map(({id, file, dependencies}) => ({
|
||||
id: pathJoin(__dirname, '..', id),
|
||||
file: pathJoin(__dirname, '..', file),
|
||||
dependencies: dependencies.map(d => pathJoin(__dirname, '..', d)),
|
||||
})));
|
||||
});
|
||||
});
|
||||
|
||||
it('should pass in more polyfills', function() {
|
||||
expect.assertions(2);
|
||||
|
||||
var module = createModule('index');
|
||||
var deps = [module];
|
||||
|
||||
var depResolverPromise = Resolver.load({
|
||||
projectRoot: '/root',
|
||||
polyfillModuleNames: ['some module'],
|
||||
});
|
||||
|
||||
DependencyGraph.prototype.getDependencies.mockImplementation(function() {
|
||||
return Promise.resolve(new ResolutionResponseMock({
|
||||
dependencies: deps,
|
||||
mainModuleId: 'index',
|
||||
}));
|
||||
});
|
||||
|
||||
return depResolverPromise
|
||||
.then(r => r.getDependencies(
|
||||
'/root/index.js',
|
||||
{dev: false},
|
||||
undefined,
|
||||
undefined,
|
||||
createGetModuleId()
|
||||
)).then(result => {
|
||||
expect(result.mainModuleId).toEqual('index');
|
||||
const calls =
|
||||
DependencyGraph.prototype.createPolyfill.mock.calls[result.dependencies.length - 2];
|
||||
expect(calls).toEqual([
|
||||
{file: 'some module',
|
||||
id: 'some module',
|
||||
dependencies: [
|
||||
'polyfills/Object.es6.js',
|
||||
'polyfills/console.js',
|
||||
'polyfills/error-guard.js',
|
||||
'polyfills/Number.es6.js',
|
||||
'polyfills/String.prototype.es6.js',
|
||||
'polyfills/Array.prototype.es6.js',
|
||||
'polyfills/Array.es6.js',
|
||||
'polyfills/Object.es7.js',
|
||||
'polyfills/babelHelpers.js',
|
||||
].map(d => pathJoin(__dirname, '..', d)),
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('wrapModule', function() {
|
||||
let depResolver;
|
||||
beforeEach(() => {
|
||||
return Resolver.load({
|
||||
projectRoot: '/root',
|
||||
}).then(r => { depResolver = r; });
|
||||
});
|
||||
|
||||
it('should resolve modules', function() {
|
||||
expect.assertions(1);
|
||||
|
||||
/*eslint-disable */
|
||||
var code = [
|
||||
// require
|
||||
'require("x")',
|
||||
'require("y");require(\'abc\');',
|
||||
'require( \'z\' )',
|
||||
'require( "a")',
|
||||
'require("b" )',
|
||||
].join('\n');
|
||||
/*eslint-enable */
|
||||
|
||||
function *findDependencyOffsets() {
|
||||
const re = /(['"']).*?\1/g;
|
||||
let match;
|
||||
while ((match = re.exec(code))) {
|
||||
yield match.index;
|
||||
}
|
||||
}
|
||||
|
||||
const dependencyOffsets = Array.from(findDependencyOffsets());
|
||||
const module = createModule('test module', ['x', 'y']);
|
||||
const resolutionResponse = new ResolutionResponseMock({
|
||||
dependencies: [module],
|
||||
mainModuleId: 'test module',
|
||||
});
|
||||
|
||||
resolutionResponse.getResolvedDependencyPairs = (module) => {
|
||||
return [
|
||||
['x', createModule('changed')],
|
||||
['y', createModule('Y')],
|
||||
['abc', createModule('abc')]
|
||||
];
|
||||
}
|
||||
|
||||
const moduleIds = new Map(
|
||||
resolutionResponse
|
||||
.getResolvedDependencyPairs()
|
||||
.map(([importId, module]) => [
|
||||
importId,
|
||||
padRight(resolutionResponse.getModuleId(module), importId.length + 2),
|
||||
])
|
||||
);
|
||||
|
||||
return depResolver.wrapModule({
|
||||
resolutionResponse,
|
||||
module: module,
|
||||
name: 'test module',
|
||||
code,
|
||||
meta: {dependencyOffsets},
|
||||
dev: false,
|
||||
}).then(({code: processedCode}) => {
|
||||
expect(processedCode).toEqual([
|
||||
'__d(/* test module */function(global, require, module, exports) {' +
|
||||
// require
|
||||
`require(${moduleIds.get('x')}) // ${moduleIds.get('x').trim()} = x`,
|
||||
`require(${moduleIds.get('y')});require(${moduleIds.get('abc')
|
||||
}); // ${moduleIds.get('abc').trim()} = abc // ${moduleIds.get('y').trim()} = y`,
|
||||
'require( \'z\' )',
|
||||
'require( "a")',
|
||||
'require("b" )',
|
||||
`}, ${resolutionResponse.getModuleId(module)});`,
|
||||
].join('\n'));
|
||||
});
|
||||
});
|
||||
|
||||
it('should add module transport names as fourth argument to `__d`', () => {
|
||||
expect.assertions(1);
|
||||
|
||||
const module = createModule('test module');
|
||||
const code = 'arbitrary(code)'
|
||||
const resolutionResponse = new ResolutionResponseMock({
|
||||
dependencies: [module],
|
||||
mainModuleId: 'test module',
|
||||
});
|
||||
return depResolver.wrapModule({
|
||||
resolutionResponse,
|
||||
code,
|
||||
module,
|
||||
name: 'test module',
|
||||
dev: true,
|
||||
}).then(({code: processedCode}) =>
|
||||
expect(processedCode).toEqual([
|
||||
'__d(/* test module */function(global, require, module, exports) {' +
|
||||
code,
|
||||
`}, ${resolutionResponse.getModuleId(module)}, null, "test module");`
|
||||
].join('\n'))
|
||||
);
|
||||
});
|
||||
|
||||
it('should pass through passed-in source maps', () => {
|
||||
expect.assertions(1);
|
||||
const module = createModule('test module');
|
||||
const resolutionResponse = new ResolutionResponseMock({
|
||||
dependencies: [module],
|
||||
mainModuleId: 'test module',
|
||||
});
|
||||
const inputMap = {version: 3, mappings: 'ARBITRARY'};
|
||||
return depResolver.wrapModule({
|
||||
resolutionResponse,
|
||||
module,
|
||||
name: 'test module',
|
||||
code: 'arbitrary(code)',
|
||||
map: inputMap,
|
||||
}).then(({map}) => expect(map).toBe(inputMap));
|
||||
});
|
||||
|
||||
it('should resolve polyfills', function () {
|
||||
expect.assertions(1);
|
||||
return Resolver.load({
|
||||
projectRoot: '/root',
|
||||
}).then(depResolver => {
|
||||
const polyfill = createPolyfill('test polyfill', []);
|
||||
const code = [
|
||||
'global.fetch = () => 1;',
|
||||
].join('');
|
||||
return depResolver.wrapModule({
|
||||
module: polyfill,
|
||||
code
|
||||
}).then(({code: processedCode}) => {
|
||||
expect(processedCode).toEqual([
|
||||
'(function(global) {',
|
||||
'global.fetch = () => 1;',
|
||||
'\n})' +
|
||||
"(typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : this);",
|
||||
].join(''));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('JSON files:', () => {
|
||||
const code = JSON.stringify({arbitrary: "data"});
|
||||
const id = 'arbitrary.json';
|
||||
let depResolver, module, resolutionResponse;
|
||||
|
||||
beforeEach(() => {
|
||||
return Resolver.load({projectRoot: '/root'}).then(r => {
|
||||
depResolver = r;
|
||||
module = createJsonModule(id);
|
||||
resolutionResponse = new ResolutionResponseMock({
|
||||
dependencies: [module],
|
||||
mainModuleId: id,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should prefix JSON files with `module.exports=`', () => {
|
||||
expect.assertions(1);
|
||||
return depResolver
|
||||
.wrapModule({resolutionResponse, module, name: id, code, dev: false})
|
||||
.then(({code: processedCode}) =>
|
||||
expect(processedCode).toEqual([
|
||||
`__d(/* ${id} */function(global, require, module, exports) {`,
|
||||
`module.exports = ${code}\n}, ${resolutionResponse.getModuleId(module)});`,
|
||||
].join('')));
|
||||
});
|
||||
});
|
||||
|
||||
describe('minification:', () => {
|
||||
const code ='arbitrary(code)';
|
||||
const id = 'arbitrary.js';
|
||||
let depResolver, minifyCode, module, resolutionResponse, sourceMap;
|
||||
|
||||
beforeEach(() => {
|
||||
minifyCode = jest.fn((filename, code, map) =>
|
||||
Promise.resolve({code, map}));
|
||||
module = createModule(id);
|
||||
module.path = '/arbitrary/path.js';
|
||||
resolutionResponse = new ResolutionResponseMock({
|
||||
dependencies: [module],
|
||||
mainModuleId: id,
|
||||
});
|
||||
sourceMap = {version: 3, sources: ['input'], mappings: 'whatever'};
|
||||
return Resolver.load({
|
||||
projectRoot: '/root',
|
||||
minifyCode,
|
||||
}).then(r => { depResolver = r; });
|
||||
});
|
||||
|
||||
it('should invoke the minifier with the wrapped code', () => {
|
||||
expect.assertions(1);
|
||||
const wrappedCode =
|
||||
`__d(/* ${id} */function(global, require, module, exports) {${
|
||||
code}\n}, ${resolutionResponse.getModuleId(module)});`
|
||||
return depResolver
|
||||
.wrapModule({
|
||||
resolutionResponse,
|
||||
module,
|
||||
name: id,
|
||||
code,
|
||||
map: sourceMap,
|
||||
minify: true,
|
||||
dev: false,
|
||||
}).then(() => {
|
||||
expect(minifyCode).toBeCalledWith(module.path, wrappedCode, sourceMap);
|
||||
});
|
||||
});
|
||||
|
||||
it('should use minified code', () => {
|
||||
expect.assertions(2);
|
||||
const minifiedCode = 'minified(code)';
|
||||
const minifiedMap = {version: 3, file: ['minified']};
|
||||
minifyCode.mockReturnValue(Promise.resolve({code: minifiedCode, map: minifiedMap}));
|
||||
return depResolver
|
||||
.wrapModule({resolutionResponse, module, name: id, code, minify: true})
|
||||
.then(({code, map}) => {
|
||||
expect(code).toEqual(minifiedCode);
|
||||
expect(map).toEqual(minifiedMap);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
function createGetModuleId() {
|
||||
let nextId = 1;
|
||||
const knownIds = new Map();
|
||||
function createId(path) {
|
||||
const id = nextId;
|
||||
nextId += 1;
|
||||
knownIds.set(path, id);
|
||||
return id;
|
||||
}
|
||||
|
||||
return ({path}) => knownIds.get(path) || createId(path);
|
||||
}
|
||||
|
||||
function padRight(value, width) {
|
||||
const s = String(value);
|
||||
const diff = width - s.length;
|
||||
return diff > 0 ? s + Array(diff + 1).join(' ') : s;
|
||||
}
|
||||
});
|
|
@ -1,290 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const DependencyGraph = require('../node-haste/DependencyGraph');
|
||||
|
||||
const defaults = require('../defaults');
|
||||
const pathJoin = require('path').join;
|
||||
|
||||
import type ResolutionResponse from '../node-haste/DependencyGraph/ResolutionResponse';
|
||||
import type Module, {HasteImpl, TransformCode} from '../node-haste/Module';
|
||||
import type {MappingsMap} from '../lib/SourceMap';
|
||||
import type {PostMinifyProcess} from '../Bundler';
|
||||
import type {Options as JSTransformerOptions} from '../JSTransformer/worker';
|
||||
import type {Reporter} from '../lib/reporting';
|
||||
import type {TransformCache, GetTransformCacheKey} from '../lib/TransformCaching';
|
||||
import type {GlobalTransformCache} from '../lib/GlobalTransformCache';
|
||||
|
||||
type MinifyCode = (filePath: string, code: string, map: MappingsMap) =>
|
||||
Promise<{code: string, map: MappingsMap}>;
|
||||
|
||||
type ContainsTransformerOptions = {+transformer: JSTransformerOptions}
|
||||
|
||||
type Options = {|
|
||||
+assetExts: Array<string>,
|
||||
+blacklistRE?: RegExp,
|
||||
+extraNodeModules: ?{},
|
||||
+getTransformCacheKey: GetTransformCacheKey,
|
||||
+globalTransformCache: ?GlobalTransformCache,
|
||||
+hasteImpl?: HasteImpl,
|
||||
+maxWorkerCount: number,
|
||||
+minifyCode: MinifyCode,
|
||||
+postMinifyProcess: PostMinifyProcess,
|
||||
+platforms: Set<string>,
|
||||
+polyfillModuleNames?: Array<string>,
|
||||
+projectRoots: $ReadOnlyArray<string>,
|
||||
+providesModuleNodeModules: Array<string>,
|
||||
+reporter: Reporter,
|
||||
+resetCache: boolean,
|
||||
+sourceExts: Array<string>,
|
||||
+transformCache: TransformCache,
|
||||
+transformCode: TransformCode,
|
||||
+watch: boolean,
|
||||
|};
|
||||
|
||||
class Resolver {
|
||||
|
||||
_depGraph: DependencyGraph;
|
||||
_minifyCode: MinifyCode;
|
||||
_postMinifyProcess: PostMinifyProcess;
|
||||
_polyfillModuleNames: Array<string>;
|
||||
|
||||
constructor(opts: Options, depGraph: DependencyGraph) {
|
||||
this._minifyCode = opts.minifyCode;
|
||||
this._postMinifyProcess = opts.postMinifyProcess;
|
||||
this._polyfillModuleNames = opts.polyfillModuleNames || [];
|
||||
this._depGraph = depGraph;
|
||||
}
|
||||
|
||||
static async load(opts: Options): Promise<Resolver> {
|
||||
const depGraphOpts = Object.assign(Object.create(opts), {
|
||||
assetDependencies: ['react-native/Libraries/Image/AssetRegistry'],
|
||||
forceNodeFilesystemAPI: false,
|
||||
ignoreFilePath(filepath) {
|
||||
return filepath.indexOf('__tests__') !== -1 ||
|
||||
(opts.blacklistRE != null && opts.blacklistRE.test(filepath));
|
||||
},
|
||||
moduleOptions: {
|
||||
hasteImpl: opts.hasteImpl,
|
||||
resetCache: opts.resetCache,
|
||||
transformCache: opts.transformCache,
|
||||
},
|
||||
preferNativePlatform: true,
|
||||
roots: opts.projectRoots,
|
||||
useWatchman: true,
|
||||
});
|
||||
const depGraph = await DependencyGraph.load(depGraphOpts);
|
||||
return new Resolver(opts, depGraph);
|
||||
}
|
||||
|
||||
getShallowDependencies(
|
||||
entryFile: string,
|
||||
transformOptions: JSTransformerOptions,
|
||||
): Promise<Array<Module>> {
|
||||
return this._depGraph.getShallowDependencies(entryFile, transformOptions);
|
||||
}
|
||||
|
||||
getModuleForPath(entryFile: string): Module {
|
||||
return this._depGraph.getModuleForPath(entryFile);
|
||||
}
|
||||
|
||||
getDependencies<T: ContainsTransformerOptions>(
|
||||
entryPath: string,
|
||||
options: {platform: ?string, recursive?: boolean},
|
||||
bundlingOptions: T,
|
||||
onProgress?: ?(finishedModules: number, totalModules: number) => mixed,
|
||||
getModuleId: mixed,
|
||||
): Promise<ResolutionResponse<Module, T>> {
|
||||
const {platform, recursive = true} = options;
|
||||
return this._depGraph.getDependencies({
|
||||
entryPath,
|
||||
platform,
|
||||
options: bundlingOptions,
|
||||
recursive,
|
||||
onProgress,
|
||||
}).then(resolutionResponse => {
|
||||
this._getPolyfillDependencies().reverse().forEach(
|
||||
polyfill => resolutionResponse.prependDependency(polyfill)
|
||||
);
|
||||
|
||||
/* $FlowFixMe: monkey patching */
|
||||
resolutionResponse.getModuleId = getModuleId;
|
||||
return resolutionResponse.finalize();
|
||||
});
|
||||
}
|
||||
|
||||
getModuleSystemDependencies({dev = true}: {dev?: boolean}): Array<Module> {
|
||||
|
||||
const prelude = dev
|
||||
? pathJoin(__dirname, 'polyfills/prelude_dev.js')
|
||||
: pathJoin(__dirname, 'polyfills/prelude.js');
|
||||
|
||||
const moduleSystem = defaults.moduleSystem;
|
||||
|
||||
return [
|
||||
prelude,
|
||||
moduleSystem,
|
||||
].map(moduleName => this._depGraph.createPolyfill({
|
||||
file: moduleName,
|
||||
id: moduleName,
|
||||
dependencies: [],
|
||||
}));
|
||||
}
|
||||
|
||||
_getPolyfillDependencies(): Array<Module> {
|
||||
const polyfillModuleNames = defaults.polyfills.concat(this._polyfillModuleNames);
|
||||
|
||||
return polyfillModuleNames.map(
|
||||
(polyfillModuleName, idx) => this._depGraph.createPolyfill({
|
||||
file: polyfillModuleName,
|
||||
id: polyfillModuleName,
|
||||
dependencies: polyfillModuleNames.slice(0, idx),
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
resolveRequires<T: ContainsTransformerOptions>(
|
||||
resolutionResponse: ResolutionResponse<Module, T>,
|
||||
module: Module,
|
||||
code: string,
|
||||
dependencyOffsets: Array<number> = [],
|
||||
): string {
|
||||
const resolvedDeps = Object.create(null);
|
||||
|
||||
// here, we build a map of all require strings (relative and absolute)
|
||||
// to the canonical ID of the module they reference
|
||||
resolutionResponse.getResolvedDependencyPairs(module)
|
||||
.forEach(([depName, depModule]) => {
|
||||
if (depModule) {
|
||||
/* $FlowFixMe: `getModuleId` is monkey-patched so may not exist */
|
||||
resolvedDeps[depName] = resolutionResponse.getModuleId(depModule);
|
||||
}
|
||||
});
|
||||
|
||||
// if we have a canonical ID for the module imported here,
|
||||
// we use it, so that require() is always called with the same
|
||||
// id for every module.
|
||||
// Example:
|
||||
// -- in a/b.js:
|
||||
// require('./c') => require(3);
|
||||
// -- in b/index.js:
|
||||
// require('../a/c') => require(3);
|
||||
return dependencyOffsets.reduceRight(
|
||||
([unhandled, handled], offset) => [
|
||||
unhandled.slice(0, offset),
|
||||
replaceDependencyID(unhandled.slice(offset) + handled, resolvedDeps),
|
||||
],
|
||||
[code, ''],
|
||||
).join('');
|
||||
}
|
||||
|
||||
wrapModule<T: ContainsTransformerOptions>({
|
||||
resolutionResponse,
|
||||
module,
|
||||
name,
|
||||
map,
|
||||
code,
|
||||
meta = {},
|
||||
dev = true,
|
||||
minify = false,
|
||||
}: {
|
||||
resolutionResponse: ResolutionResponse<Module, T>,
|
||||
module: Module,
|
||||
name: string,
|
||||
map: MappingsMap,
|
||||
code: string,
|
||||
meta?: {
|
||||
dependencyOffsets?: Array<number>,
|
||||
},
|
||||
dev?: boolean,
|
||||
minify?: boolean,
|
||||
}) {
|
||||
if (module.isJSON()) {
|
||||
code = `module.exports = ${code}`;
|
||||
}
|
||||
|
||||
if (module.isPolyfill()) {
|
||||
code = definePolyfillCode(code);
|
||||
} else {
|
||||
/* $FlowFixMe: `getModuleId` is monkey-patched so may not exist */
|
||||
const moduleId = resolutionResponse.getModuleId(module);
|
||||
code = this.resolveRequires(
|
||||
resolutionResponse,
|
||||
module,
|
||||
code,
|
||||
meta.dependencyOffsets
|
||||
);
|
||||
code = defineModuleCode(moduleId, code, name, dev);
|
||||
}
|
||||
|
||||
return minify
|
||||
? this._minifyCode(module.path, code, map).then(this._postMinifyProcess)
|
||||
: Promise.resolve({code, map});
|
||||
}
|
||||
|
||||
minifyModule(
|
||||
{path, code, map}: {path: string, code: string, map: MappingsMap},
|
||||
): Promise<{code: string, map: MappingsMap}> {
|
||||
return this._minifyCode(path, code, map);
|
||||
}
|
||||
|
||||
getDependencyGraph(): DependencyGraph {
|
||||
return this._depGraph;
|
||||
}
|
||||
}
|
||||
|
||||
function defineModuleCode(moduleName, code, verboseName = '', dev = true) {
|
||||
return [
|
||||
`__d(/* ${verboseName} */`,
|
||||
'function(global, require, module, exports) {', // module factory
|
||||
code,
|
||||
'\n}, ',
|
||||
`${JSON.stringify(moduleName)}`, // module id, null = id map. used in ModuleGraph
|
||||
dev ? `, null, ${JSON.stringify(verboseName)}` : '',
|
||||
');',
|
||||
].join('');
|
||||
}
|
||||
|
||||
function definePolyfillCode(code) {
|
||||
return [
|
||||
'(function(global) {',
|
||||
code,
|
||||
`\n})(typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : this);`,
|
||||
].join('');
|
||||
}
|
||||
|
||||
const reDependencyString = /^(['"])([^'"]*)\1/;
|
||||
function replaceDependencyID(stringWithDependencyIDAtStart, resolvedDeps) {
|
||||
const match = reDependencyString.exec(stringWithDependencyIDAtStart);
|
||||
const dependencyName = match && match[2];
|
||||
if (match != null && dependencyName in resolvedDeps) {
|
||||
const {length} = match[0];
|
||||
const id = String(resolvedDeps[dependencyName]);
|
||||
return (
|
||||
padRight(id, length) +
|
||||
stringWithDependencyIDAtStart
|
||||
.slice(length)
|
||||
.replace(/$/m, ` // ${id} = ${dependencyName}`)
|
||||
);
|
||||
} else {
|
||||
return stringWithDependencyIDAtStart;
|
||||
}
|
||||
}
|
||||
|
||||
function padRight(string, length) {
|
||||
return string.length < length
|
||||
? string + Array(length - string.length + 1).join(' ')
|
||||
: string;
|
||||
}
|
||||
|
||||
module.exports = Resolver;
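For reference, a standalone sketch of the in-place require rewriting done by resolveRequires/replaceDependencyID above; the helper mirrors those private functions on a plain string, is not part of the removed file's API, and the module IDs are made up:

// Map of require strings to canonical module IDs (illustrative values).
const resolvedDeps = {'./c': 3, '../a/c': 3};

// Swap the quoted name for its numeric ID, padded to the length of the original
// literal so surrounding columns keep their positions, and append a comment
// naming the original module.
function rewriteAtOffset(code, offset) {
  const match = /^(['"])([^'"]*)\1/.exec(code.slice(offset));
  if (!match || !(match[2] in resolvedDeps)) {
    return code;
  }
  const id = String(resolvedDeps[match[2]]);
  const padded = id + ' '.repeat(match[0].length - id.length);
  return (
    code.slice(0, offset) +
    padded +
    code.slice(offset + match[0].length).replace(/$/m, ` // ${id} = ${match[2]}`)
  );
}

rewriteAtOffset("require('./c');", 8);
// -> "require(3    ); // 3 = ./c"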
|
|
@ -1,86 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2013-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @polyfill
|
||||
*/
|
||||
|
||||
/* eslint-disable */
|
||||
|
||||
/**
|
||||
* Creates an array from array like objects.
|
||||
*
|
||||
* https://people.mozilla.org/~jorendorff/es6-draft.html#sec-array.from
|
||||
*/
|
||||
if (!Array.from) {
|
||||
Array.from = function(arrayLike /*, mapFn, thisArg */) {
|
||||
if (arrayLike == null) {
|
||||
throw new TypeError('Object is null or undefined');
|
||||
}
|
||||
|
||||
// Optional args.
|
||||
var mapFn = arguments[1];
|
||||
var thisArg = arguments[2];
|
||||
|
||||
var C = this;
|
||||
var items = Object(arrayLike);
|
||||
var symbolIterator = typeof Symbol === 'function'
|
||||
? Symbol.iterator
|
||||
: '@@iterator';
|
||||
var mapping = typeof mapFn === 'function';
|
||||
var usingIterator = typeof items[symbolIterator] === 'function';
|
||||
var key = 0;
|
||||
var ret;
|
||||
var value;
|
||||
|
||||
if (usingIterator) {
|
||||
ret = typeof C === 'function'
|
||||
? new C()
|
||||
: [];
|
||||
var it = items[symbolIterator]();
|
||||
var next;
|
||||
|
||||
while (!(next = it.next()).done) {
|
||||
value = next.value;
|
||||
|
||||
if (mapping) {
|
||||
value = mapFn.call(thisArg, value, key);
|
||||
}
|
||||
|
||||
ret[key] = value;
|
||||
key += 1;
|
||||
}
|
||||
|
||||
ret.length = key;
|
||||
return ret;
|
||||
}
|
||||
|
||||
var len = items.length;
|
||||
if (isNaN(len) || len < 0) {
|
||||
len = 0;
|
||||
}
|
||||
|
||||
ret = typeof C === 'function'
|
||||
? new C(len)
|
||||
: new Array(len);
|
||||
|
||||
while (key < len) {
|
||||
value = items[key];
|
||||
|
||||
if (mapping) {
|
||||
value = mapFn.call(thisArg, value, key);
|
||||
}
|
||||
|
||||
ret[key] = value;
|
||||
|
||||
key += 1;
|
||||
}
|
||||
|
||||
ret.length = key;
|
||||
return ret;
|
||||
};
|
||||
}
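Usage of the polyfilled Array.from, for reference; the examples are illustrative and not part of the removed file:

Array.from('abc');                        // ['a', 'b', 'c']  (iterator path)
Array.from(new Set([1, 1, 2]));           // [1, 2]
Array.from({length: 3}, (_, i) => i * 2); // [0, 2, 4]        (array-like + mapFn)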
|
|
@ -1,95 +0,0 @@
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @polyfill
 */

/* eslint-disable */

// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/findIndex
function findIndex(predicate, context) {
  if (this == null) {
    throw new TypeError(
      'Array.prototype.findIndex called on null or undefined'
    );
  }
  if (typeof predicate !== 'function') {
    throw new TypeError('predicate must be a function');
  }
  var list = Object(this);
  var length = list.length >>> 0;
  for (var i = 0; i < length; i++) {
    if (predicate.call(context, list[i], i, list)) {
      return i;
    }
  }
  return -1;
}

if (!Array.prototype.findIndex) {
  Object.defineProperty(Array.prototype, 'findIndex', {
    enumerable: false,
    writable: true,
    configurable: true,
    value: findIndex
  });
}

// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/find
if (!Array.prototype.find) {
  Object.defineProperty(Array.prototype, 'find', {
    enumerable: false,
    writable: true,
    configurable: true,
    value: function(predicate, context) {
      if (this == null) {
        throw new TypeError(
          'Array.prototype.find called on null or undefined'
        );
      }
      var index = findIndex.call(this, predicate, context);
      return index === -1 ? undefined : this[index];
    }
  });
}

// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/includes
if (!Array.prototype.includes) {
  Object.defineProperty(Array.prototype, 'includes', {
    enumerable: false,
    writable: true,
    configurable: true,
    value: function (searchElement) {
      var O = Object(this);
      var len = parseInt(O.length) || 0;
      if (len === 0) {
        return false;
      }
      var n = parseInt(arguments[1]) || 0;
      var k;
      if (n >= 0) {
        k = n;
      } else {
        k = len + n;
        if (k < 0) {
          k = 0;
        }
      }
      var currentElement;
      while (k < len) {
        currentElement = O[k];
        if (searchElement === currentElement ||
           (searchElement !== searchElement && currentElement !== currentElement)) {
          return true;
        }
        k++;
      }
      return false;
    }
  });
}
@ -1,41 +0,0 @@
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @polyfill
 */

/* eslint-disable strict */

if (Number.EPSILON === undefined) {
  Object.defineProperty(Number, 'EPSILON', {
    value: Math.pow(2, -52),
  });
}
if (Number.MAX_SAFE_INTEGER === undefined) {
  Object.defineProperty(Number, 'MAX_SAFE_INTEGER', {
    value: Math.pow(2, 53) - 1,
  });
}
if (Number.MIN_SAFE_INTEGER === undefined) {
  Object.defineProperty(Number, 'MIN_SAFE_INTEGER', {
    value: -(Math.pow(2, 53) - 1),
  });
}
if (!Number.isNaN) {
  // eslint-disable-next-line max-len
  // https://github.com/dherman/tc39-codex-wiki/blob/master/data/es6/number/index.md#polyfill-for-numberisnan
  const globalIsNaN = global.isNaN;
  Object.defineProperty(Number, 'isNaN', {
    configurable: true,
    enumerable: false,
    value: function isNaN(value) {
      return typeof value === 'number' && globalIsNaN(value);
    },
    writable: true,
  });
}
@ -1,68 +0,0 @@
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @polyfill
 */

/* eslint-disable strict */

// WARNING: This is an optimized version that fails on hasOwnProperty checks
// and non objects. It's not spec-compliant. It's a perf optimization.
// This is only needed for iOS 8 and current Android JSC.

Object.assign = function(target, sources) {
  if (__DEV__) {
    if (target == null) {
      throw new TypeError('Object.assign target cannot be null or undefined');
    }
    if (typeof target !== 'object' && typeof target !== 'function') {
      throw new TypeError(
        'In this environment the target of assign MUST be an object.' +
        'This error is a performance optimization and not spec compliant.'
      );
    }
  }

  for (var nextIndex = 1; nextIndex < arguments.length; nextIndex++) {
    var nextSource = arguments[nextIndex];
    if (nextSource == null) {
      continue;
    }

    if (__DEV__) {
      if (typeof nextSource !== 'object' &&
          typeof nextSource !== 'function') {
        throw new TypeError(
          'In this environment the sources for assign MUST be an object.' +
          'This error is a performance optimization and not spec compliant.'
        );
      }
    }

    // We don't currently support accessors nor proxies. Therefore this
    // copy cannot throw. If we ever supported this then we must handle
    // exceptions and side-effects.

    for (var key in nextSource) {
      if (__DEV__) {
        var hasOwnProperty = Object.prototype.hasOwnProperty;
        if (!hasOwnProperty.call(nextSource, key)) {
          throw new TypeError(
            'One of the sources for assign has an enumerable key on the ' +
            'prototype chain. Are you trying to assign a prototype property? ' +
            'We don\'t allow it, as this is an edge case that we do not support. ' +
            'This error is a performance optimization and not spec compliant.'
          );
        }
      }
      target[key] = nextSource[key];
    }
  }

  return target;
};
@ -1,59 +0,0 @@
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @polyfill
 */

(function() {
  'use strict';

  const hasOwnProperty = Object.prototype.hasOwnProperty;

  /**
   * Returns an array of the given object's own enumerable entries.
   * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/entries
   */
  if (typeof Object.entries !== 'function') {
    Object.entries = function(object) {
      // `null` and `undefined` values are not allowed.
      if (object == null) {
        throw new TypeError('Object.entries called on non-object');
      }

      const entries = [];
      for (const key in object) {
        if (hasOwnProperty.call(object, key)) {
          entries.push([key, object[key]]);
        }
      }
      return entries;
    };
  }

  /**
   * Returns an array of the given object's own enumerable entries.
   * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/values
   */
  if (typeof Object.values !== 'function') {
    Object.values = function(object) {
      // `null` and `undefined` values are not allowed.
      if (object == null) {
        throw new TypeError('Object.values called on non-object');
      }

      const values = [];
      for (const key in object) {
        if (hasOwnProperty.call(object, key)) {
          values.push(object[key]);
        }
      }
      return values;
    };
  }

})();
@ -1,92 +0,0 @@
/**
 * Copyright (c) 2013-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @polyfill
 */

/* eslint-disable strict, no-extend-native, no-bitwise */

/*
 * NOTE: We use (Number(x) || 0) to replace NaN values with zero.
 */

if (!String.prototype.startsWith) {
  String.prototype.startsWith = function(search) {
    'use strict';
    if (this == null) {
      throw TypeError();
    }
    var string = String(this);
    var pos = arguments.length > 1 ?
      (Number(arguments[1]) || 0) : 0;
    var start = Math.min(Math.max(pos, 0), string.length);
    return string.indexOf(String(search), pos) === start;
  };
}

if (!String.prototype.endsWith) {
  String.prototype.endsWith = function(search) {
    'use strict';
    if (this == null) {
      throw TypeError();
    }
    var string = String(this);
    var stringLength = string.length;
    var searchString = String(search);
    var pos = arguments.length > 1 ?
      (Number(arguments[1]) || 0) : stringLength;
    var end = Math.min(Math.max(pos, 0), stringLength);
    var start = end - searchString.length;
    if (start < 0) {
      return false;
    }
    return string.lastIndexOf(searchString, start) === start;
  };
}

if (!String.prototype.repeat) {
  String.prototype.repeat = function(count) {
    'use strict';
    if (this == null) {
      throw TypeError();
    }
    var string = String(this);
    count = Number(count) || 0;
    if (count < 0 || count === Infinity) {
      throw RangeError();
    }
    if (count === 1) {
      return string;
    }
    var result = '';
    while (count) {
      if (count & 1) {
        result += string;
      }
      if ((count >>= 1)) {
        string += string;
      }
    }
    return result;
  };
}

if (!String.prototype.includes) {
  String.prototype.includes = function(search, start) {
    'use strict';
    if (typeof start !== 'number') {
      start = 0;
    }

    if (start + search.length > this.length) {
      return false;
    } else {
      return this.indexOf(search, start) !== -1;
    }
  };
}
@ -1,127 +0,0 @@
/**
 * Copyright (c) 2013-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @emails oncall+jsinfra
 */

/* eslint-disable fb-www/object-create-only-one-param */

'use strict';

jest.disableAutomock();

describe('Object (ES7)', () => {
  beforeEach(() => {
    delete Object.entries;
    delete Object.values;
    jest.resetModules();
    require('../Object.es7');
  });

  describe('Object.entries', () => {
    it('should have a length of 1', () => {
      expect(Object.entries.length).toBe(1);
    });

    it('should check for type', () => {
      expect(Object.entries.bind(null, null)).toThrow(TypeError(
        'Object.entries called on non-object'
      ));
      expect(Object.entries.bind(null, undefined)).toThrow(TypeError(
        'Object.entries called on non-object'
      ));
      expect(Object.entries.bind(null, [])).not.toThrow();
      expect(Object.entries.bind(null, () => {})).not.toThrow();
      expect(Object.entries.bind(null, {})).not.toThrow();
      expect(Object.entries.bind(null, 'abc')).not.toThrow();
    });

    it('should return enumerable entries', () => {
      const foo = Object.defineProperties({}, {
        x: {value: 10, enumerable: true},
        y: {value: 20},
      });

      expect(Object.entries(foo)).toEqual([['x', 10]]);

      const bar = {x: 10, y: 20};
      expect(Object.entries(bar)).toEqual([['x', 10], ['y', 20]]);
    });

    it('should work with proto-less objects', () => {
      const foo = Object.create(null, {
        x: {value: 10, enumerable: true},
        y: {value: 20},
      });

      expect(Object.entries(foo)).toEqual([['x', 10]]);
    });

    it('should return only own entries', () => {
      const foo = Object.create({z: 30}, {
        x: {value: 10, enumerable: true},
        y: {value: 20},
      });

      expect(Object.entries(foo)).toEqual([['x', 10]]);
    });

    it('should convert to object primitive string', () => {
      expect(Object.entries('ab')).toEqual([['0', 'a'], ['1', 'b']]);
    });
  });

  describe('Object.values', () => {
    it('should have a length of 1', () => {
      expect(Object.values.length).toBe(1);
    });

    it('should check for type', () => {
      expect(Object.values.bind(null, null)).toThrow(TypeError(
        'Object.values called on non-object'
      ));
      expect(Object.values.bind(null, [])).not.toThrow();
      expect(Object.values.bind(null, () => {})).not.toThrow();
      expect(Object.values.bind(null, {})).not.toThrow();
    });

    it('should return enumerable values', () => {
      const foo = Object.defineProperties({}, {
        x: {value: 10, enumerable: true},
        y: {value: 20},
      });

      expect(Object.values(foo)).toEqual([10]);

      const bar = {x: 10, y: 20};
      expect(Object.values(bar)).toEqual([10, 20]);
    });

    it('should work with proto-less objects', () => {
      const foo = Object.create(null, {
        x: {value: 10, enumerable: true},
        y: {value: 20},
      });

      expect(Object.values(foo)).toEqual([10]);
    });

    it('should return only own values', () => {
      const foo = Object.create({z: 30}, {
        x: {value: 10, enumerable: true},
        y: {value: 20},
      });

      expect(Object.values(foo)).toEqual([10]);
    });

    it('should convert to object primitive string', () => {
      expect(Object.values('ab')).toEqual(['a', 'b']);
    });
  });
});
@ -1,247 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @polyfill
|
||||
*/
|
||||
|
||||
/* eslint-disable */
|
||||
|
||||
// Created by running:
|
||||
// require('babel-core').buildExternalHelpers('_extends classCallCheck createClass createRawReactElement defineProperty get inherits interopRequireDefault interopRequireWildcard objectWithoutProperties possibleConstructorReturn slicedToArray taggedTemplateLiteral toArray toConsumableArray '.split(' '))
|
||||
// then replacing the `global` reference in the last line to also use `this`.
|
||||
//
|
||||
// actually, that's a lie, because babel6 omits _extends and createRawReactElement
|
||||
|
||||
var babelHelpers = global.babelHelpers = {};
|
||||
|
||||
babelHelpers.typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) {
|
||||
return typeof obj;
|
||||
} : function (obj) {
|
||||
return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;
|
||||
};
|
||||
|
||||
babelHelpers.createRawReactElement = (function () {
|
||||
var REACT_ELEMENT_TYPE = typeof Symbol === "function" && Symbol.for && Symbol.for("react.element") || 0xeac7;
|
||||
return function createRawReactElement(type, key, props) {
|
||||
return {
|
||||
$$typeof: REACT_ELEMENT_TYPE,
|
||||
type: type,
|
||||
key: key,
|
||||
ref: null,
|
||||
props: props,
|
||||
_owner: null
|
||||
};
|
||||
};
|
||||
})();
|
||||
|
||||
babelHelpers.classCallCheck = function (instance, Constructor) {
|
||||
if (!(instance instanceof Constructor)) {
|
||||
throw new TypeError("Cannot call a class as a function");
|
||||
}
|
||||
};
|
||||
|
||||
babelHelpers.createClass = (function () {
|
||||
function defineProperties(target, props) {
|
||||
for (var i = 0; i < props.length; i++) {
|
||||
var descriptor = props[i];
|
||||
descriptor.enumerable = descriptor.enumerable || false;
|
||||
descriptor.configurable = true;
|
||||
if ("value" in descriptor) descriptor.writable = true;
|
||||
Object.defineProperty(target, descriptor.key, descriptor);
|
||||
}
|
||||
}
|
||||
|
||||
return function (Constructor, protoProps, staticProps) {
|
||||
if (protoProps) defineProperties(Constructor.prototype, protoProps);
|
||||
if (staticProps) defineProperties(Constructor, staticProps);
|
||||
return Constructor;
|
||||
};
|
||||
})();
|
||||
|
||||
babelHelpers.defineEnumerableProperties = function(obj, descs) {
|
||||
for (var key in descs) {
|
||||
var desc = descs[key];
|
||||
desc.configurable = (desc.enumerable = true);
|
||||
if ('value' in desc) desc.writable = true;
|
||||
Object.defineProperty(obj, key, desc);
|
||||
}
|
||||
return obj;
|
||||
};
|
||||
|
||||
babelHelpers.defineProperty = function (obj, key, value) {
|
||||
if (key in obj) {
|
||||
Object.defineProperty(obj, key, {
|
||||
value: value,
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true
|
||||
});
|
||||
} else {
|
||||
obj[key] = value;
|
||||
}
|
||||
|
||||
return obj;
|
||||
};
|
||||
|
||||
babelHelpers._extends = babelHelpers.extends = Object.assign || function (target) {
|
||||
for (var i = 1; i < arguments.length; i++) {
|
||||
var source = arguments[i];
|
||||
|
||||
for (var key in source) {
|
||||
if (Object.prototype.hasOwnProperty.call(source, key)) {
|
||||
target[key] = source[key];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return target;
|
||||
};
|
||||
|
||||
babelHelpers.get = function get(object, property, receiver) {
|
||||
if (object === null) object = Function.prototype;
|
||||
var desc = Object.getOwnPropertyDescriptor(object, property);
|
||||
|
||||
if (desc === undefined) {
|
||||
var parent = Object.getPrototypeOf(object);
|
||||
|
||||
if (parent === null) {
|
||||
return undefined;
|
||||
} else {
|
||||
return get(parent, property, receiver);
|
||||
}
|
||||
} else if ("value" in desc) {
|
||||
return desc.value;
|
||||
} else {
|
||||
var getter = desc.get;
|
||||
|
||||
if (getter === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return getter.call(receiver);
|
||||
}
|
||||
};
|
||||
|
||||
babelHelpers.inherits = function (subClass, superClass) {
|
||||
if (typeof superClass !== "function" && superClass !== null) {
|
||||
throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
|
||||
}
|
||||
|
||||
subClass.prototype = Object.create(superClass && superClass.prototype, {
|
||||
constructor: {
|
||||
value: subClass,
|
||||
enumerable: false,
|
||||
writable: true,
|
||||
configurable: true
|
||||
}
|
||||
});
|
||||
if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;
|
||||
};
|
||||
|
||||
babelHelpers.interopRequireDefault = function (obj) {
|
||||
return obj && obj.__esModule ? obj : {
|
||||
default: obj
|
||||
};
|
||||
};
|
||||
|
||||
babelHelpers.interopRequireWildcard = function (obj) {
|
||||
if (obj && obj.__esModule) {
|
||||
return obj;
|
||||
} else {
|
||||
var newObj = {};
|
||||
|
||||
if (obj != null) {
|
||||
for (var key in obj) {
|
||||
if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key];
|
||||
}
|
||||
}
|
||||
|
||||
newObj.default = obj;
|
||||
return newObj;
|
||||
}
|
||||
};
|
||||
|
||||
babelHelpers.objectWithoutProperties = function (obj, keys) {
|
||||
var target = {};
|
||||
|
||||
for (var i in obj) {
|
||||
if (keys.indexOf(i) >= 0) continue;
|
||||
if (!Object.prototype.hasOwnProperty.call(obj, i)) continue;
|
||||
target[i] = obj[i];
|
||||
}
|
||||
|
||||
return target;
|
||||
};
|
||||
|
||||
babelHelpers.possibleConstructorReturn = function (self, call) {
|
||||
if (!self) {
|
||||
throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
|
||||
}
|
||||
|
||||
return call && (typeof call === "object" || typeof call === "function") ? call : self;
|
||||
};
|
||||
|
||||
babelHelpers.slicedToArray = (function () {
|
||||
function sliceIterator(arr, i) {
|
||||
var _arr = [];
|
||||
var _n = true;
|
||||
var _d = false;
|
||||
var _e = undefined;
|
||||
|
||||
try {
|
||||
for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) {
|
||||
_arr.push(_s.value);
|
||||
|
||||
if (i && _arr.length === i) break;
|
||||
}
|
||||
} catch (err) {
|
||||
_d = true;
|
||||
_e = err;
|
||||
} finally {
|
||||
try {
|
||||
if (!_n && _i["return"]) _i["return"]();
|
||||
} finally {
|
||||
if (_d) throw _e;
|
||||
}
|
||||
}
|
||||
|
||||
return _arr;
|
||||
}
|
||||
|
||||
return function (arr, i) {
|
||||
if (Array.isArray(arr)) {
|
||||
return arr;
|
||||
} else if (Symbol.iterator in Object(arr)) {
|
||||
return sliceIterator(arr, i);
|
||||
} else {
|
||||
throw new TypeError("Invalid attempt to destructure non-iterable instance");
|
||||
}
|
||||
};
|
||||
})();
|
||||
|
||||
babelHelpers.taggedTemplateLiteral = function (strings, raw) {
|
||||
return Object.freeze(Object.defineProperties(strings, {
|
||||
raw: {
|
||||
value: Object.freeze(raw)
|
||||
}
|
||||
}));
|
||||
};
|
||||
|
||||
babelHelpers.toArray = function (arr) {
|
||||
return Array.isArray(arr) ? arr : Array.from(arr);
|
||||
};
|
||||
|
||||
babelHelpers.toConsumableArray = function (arr) {
|
||||
if (Array.isArray(arr)) {
|
||||
for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) arr2[i] = arr[i];
|
||||
|
||||
return arr2;
|
||||
} else {
|
||||
return Array.from(arr);
|
||||
}
|
||||
};
|
|
@ -1,514 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @polyfill
|
||||
* @nolint
|
||||
*/
|
||||
|
||||
/* eslint-disable */
|
||||
|
||||
/**
|
||||
* This pipes all of our console logging functions to native logging so that
|
||||
* JavaScript errors in required modules show up in Xcode via NSLog.
|
||||
*/
|
||||
const inspect = (function() {
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
//
|
||||
// https://github.com/joyent/node/blob/master/lib/util.js
|
||||
|
||||
function inspect(obj, opts) {
|
||||
var ctx = {
|
||||
seen: [],
|
||||
stylize: stylizeNoColor
|
||||
};
|
||||
return formatValue(ctx, obj, opts.depth);
|
||||
}
|
||||
|
||||
function stylizeNoColor(str, styleType) {
|
||||
return str;
|
||||
}
|
||||
|
||||
function arrayToHash(array) {
|
||||
var hash = {};
|
||||
|
||||
array.forEach(function(val, idx) {
|
||||
hash[val] = true;
|
||||
});
|
||||
|
||||
return hash;
|
||||
}
|
||||
|
||||
|
||||
function formatValue(ctx, value, recurseTimes) {
|
||||
// Primitive types cannot have properties
|
||||
var primitive = formatPrimitive(ctx, value);
|
||||
if (primitive) {
|
||||
return primitive;
|
||||
}
|
||||
|
||||
// Look up the keys of the object.
|
||||
var keys = Object.keys(value);
|
||||
var visibleKeys = arrayToHash(keys);
|
||||
|
||||
// IE doesn't make error fields non-enumerable
|
||||
// http://msdn.microsoft.com/en-us/library/ie/dww52sbt(v=vs.94).aspx
|
||||
if (isError(value)
|
||||
&& (keys.indexOf('message') >= 0 || keys.indexOf('description') >= 0)) {
|
||||
return formatError(value);
|
||||
}
|
||||
|
||||
// Some type of object without properties can be shortcutted.
|
||||
if (keys.length === 0) {
|
||||
if (isFunction(value)) {
|
||||
var name = value.name ? ': ' + value.name : '';
|
||||
return ctx.stylize('[Function' + name + ']', 'special');
|
||||
}
|
||||
if (isRegExp(value)) {
|
||||
return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
|
||||
}
|
||||
if (isDate(value)) {
|
||||
return ctx.stylize(Date.prototype.toString.call(value), 'date');
|
||||
}
|
||||
if (isError(value)) {
|
||||
return formatError(value);
|
||||
}
|
||||
}
|
||||
|
||||
var base = '', array = false, braces = ['{', '}'];
|
||||
|
||||
// Make Array say that they are Array
|
||||
if (isArray(value)) {
|
||||
array = true;
|
||||
braces = ['[', ']'];
|
||||
}
|
||||
|
||||
// Make functions say that they are functions
|
||||
if (isFunction(value)) {
|
||||
var n = value.name ? ': ' + value.name : '';
|
||||
base = ' [Function' + n + ']';
|
||||
}
|
||||
|
||||
// Make RegExps say that they are RegExps
|
||||
if (isRegExp(value)) {
|
||||
base = ' ' + RegExp.prototype.toString.call(value);
|
||||
}
|
||||
|
||||
// Make dates with properties first say the date
|
||||
if (isDate(value)) {
|
||||
base = ' ' + Date.prototype.toUTCString.call(value);
|
||||
}
|
||||
|
||||
// Make error with message first say the error
|
||||
if (isError(value)) {
|
||||
base = ' ' + formatError(value);
|
||||
}
|
||||
|
||||
if (keys.length === 0 && (!array || value.length == 0)) {
|
||||
return braces[0] + base + braces[1];
|
||||
}
|
||||
|
||||
if (recurseTimes < 0) {
|
||||
if (isRegExp(value)) {
|
||||
return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
|
||||
} else {
|
||||
return ctx.stylize('[Object]', 'special');
|
||||
}
|
||||
}
|
||||
|
||||
ctx.seen.push(value);
|
||||
|
||||
var output;
|
||||
if (array) {
|
||||
output = formatArray(ctx, value, recurseTimes, visibleKeys, keys);
|
||||
} else {
|
||||
output = keys.map(function(key) {
|
||||
return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array);
|
||||
});
|
||||
}
|
||||
|
||||
ctx.seen.pop();
|
||||
|
||||
return reduceToSingleString(output, base, braces);
|
||||
}
|
||||
|
||||
|
||||
function formatPrimitive(ctx, value) {
|
||||
if (isUndefined(value))
|
||||
return ctx.stylize('undefined', 'undefined');
|
||||
if (isString(value)) {
|
||||
var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '')
|
||||
.replace(/'/g, "\\'")
|
||||
.replace(/\\"/g, '"') + '\'';
|
||||
return ctx.stylize(simple, 'string');
|
||||
}
|
||||
if (isNumber(value))
|
||||
return ctx.stylize('' + value, 'number');
|
||||
if (isBoolean(value))
|
||||
return ctx.stylize('' + value, 'boolean');
|
||||
// For some reason typeof null is "object", so special case here.
|
||||
if (isNull(value))
|
||||
return ctx.stylize('null', 'null');
|
||||
}
|
||||
|
||||
|
||||
function formatError(value) {
|
||||
return '[' + Error.prototype.toString.call(value) + ']';
|
||||
}
|
||||
|
||||
|
||||
function formatArray(ctx, value, recurseTimes, visibleKeys, keys) {
|
||||
var output = [];
|
||||
for (var i = 0, l = value.length; i < l; ++i) {
|
||||
if (hasOwnProperty(value, String(i))) {
|
||||
output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
|
||||
String(i), true));
|
||||
} else {
|
||||
output.push('');
|
||||
}
|
||||
}
|
||||
keys.forEach(function(key) {
|
||||
if (!key.match(/^\d+$/)) {
|
||||
output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
|
||||
key, true));
|
||||
}
|
||||
});
|
||||
return output;
|
||||
}
|
||||
|
||||
|
||||
function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) {
|
||||
var name, str, desc;
|
||||
desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] };
|
||||
if (desc.get) {
|
||||
if (desc.set) {
|
||||
str = ctx.stylize('[Getter/Setter]', 'special');
|
||||
} else {
|
||||
str = ctx.stylize('[Getter]', 'special');
|
||||
}
|
||||
} else {
|
||||
if (desc.set) {
|
||||
str = ctx.stylize('[Setter]', 'special');
|
||||
}
|
||||
}
|
||||
if (!hasOwnProperty(visibleKeys, key)) {
|
||||
name = '[' + key + ']';
|
||||
}
|
||||
if (!str) {
|
||||
if (ctx.seen.indexOf(desc.value) < 0) {
|
||||
if (isNull(recurseTimes)) {
|
||||
str = formatValue(ctx, desc.value, null);
|
||||
} else {
|
||||
str = formatValue(ctx, desc.value, recurseTimes - 1);
|
||||
}
|
||||
if (str.indexOf('\n') > -1) {
|
||||
if (array) {
|
||||
str = str.split('\n').map(function(line) {
|
||||
return ' ' + line;
|
||||
}).join('\n').substr(2);
|
||||
} else {
|
||||
str = '\n' + str.split('\n').map(function(line) {
|
||||
return ' ' + line;
|
||||
}).join('\n');
|
||||
}
|
||||
}
|
||||
} else {
|
||||
str = ctx.stylize('[Circular]', 'special');
|
||||
}
|
||||
}
|
||||
if (isUndefined(name)) {
|
||||
if (array && key.match(/^\d+$/)) {
|
||||
return str;
|
||||
}
|
||||
name = JSON.stringify('' + key);
|
||||
if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) {
|
||||
name = name.substr(1, name.length - 2);
|
||||
name = ctx.stylize(name, 'name');
|
||||
} else {
|
||||
name = name.replace(/'/g, "\\'")
|
||||
.replace(/\\"/g, '"')
|
||||
.replace(/(^"|"$)/g, "'");
|
||||
name = ctx.stylize(name, 'string');
|
||||
}
|
||||
}
|
||||
|
||||
return name + ': ' + str;
|
||||
}
|
||||
|
||||
|
||||
function reduceToSingleString(output, base, braces) {
|
||||
var numLinesEst = 0;
|
||||
var length = output.reduce(function(prev, cur) {
|
||||
numLinesEst++;
|
||||
if (cur.indexOf('\n') >= 0) numLinesEst++;
|
||||
return prev + cur.replace(/\u001b\[\d\d?m/g, '').length + 1;
|
||||
}, 0);
|
||||
|
||||
if (length > 60) {
|
||||
return braces[0] +
|
||||
(base === '' ? '' : base + '\n ') +
|
||||
' ' +
|
||||
output.join(',\n ') +
|
||||
' ' +
|
||||
braces[1];
|
||||
}
|
||||
|
||||
return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1];
|
||||
}
|
||||
|
||||
|
||||
// NOTE: These type checking functions intentionally don't use `instanceof`
|
||||
// because it is fragile and can be easily faked with `Object.create()`.
|
||||
function isArray(ar) {
|
||||
return Array.isArray(ar);
|
||||
}
|
||||
|
||||
function isBoolean(arg) {
|
||||
return typeof arg === 'boolean';
|
||||
}
|
||||
|
||||
function isNull(arg) {
|
||||
return arg === null;
|
||||
}
|
||||
|
||||
function isNullOrUndefined(arg) {
|
||||
return arg == null;
|
||||
}
|
||||
|
||||
function isNumber(arg) {
|
||||
return typeof arg === 'number';
|
||||
}
|
||||
|
||||
function isString(arg) {
|
||||
return typeof arg === 'string';
|
||||
}
|
||||
|
||||
function isSymbol(arg) {
|
||||
return typeof arg === 'symbol';
|
||||
}
|
||||
|
||||
function isUndefined(arg) {
|
||||
return arg === void 0;
|
||||
}
|
||||
|
||||
function isRegExp(re) {
|
||||
return isObject(re) && objectToString(re) === '[object RegExp]';
|
||||
}
|
||||
|
||||
function isObject(arg) {
|
||||
return typeof arg === 'object' && arg !== null;
|
||||
}
|
||||
|
||||
function isDate(d) {
|
||||
return isObject(d) && objectToString(d) === '[object Date]';
|
||||
}
|
||||
|
||||
function isError(e) {
|
||||
return isObject(e) &&
|
||||
(objectToString(e) === '[object Error]' || e instanceof Error);
|
||||
}
|
||||
|
||||
function isFunction(arg) {
|
||||
return typeof arg === 'function';
|
||||
}
|
||||
|
||||
function isPrimitive(arg) {
|
||||
return arg === null ||
|
||||
typeof arg === 'boolean' ||
|
||||
typeof arg === 'number' ||
|
||||
typeof arg === 'string' ||
|
||||
typeof arg === 'symbol' || // ES6 symbol
|
||||
typeof arg === 'undefined';
|
||||
}
|
||||
|
||||
function objectToString(o) {
|
||||
return Object.prototype.toString.call(o);
|
||||
}
|
||||
|
||||
function hasOwnProperty(obj, prop) {
|
||||
return Object.prototype.hasOwnProperty.call(obj, prop);
|
||||
}
|
||||
|
||||
return inspect;
|
||||
})();
|
||||
|
||||
|
||||
const OBJECT_COLUMN_NAME = '(index)';
|
||||
const LOG_LEVELS = {
|
||||
trace: 0,
|
||||
info: 1,
|
||||
warn: 2,
|
||||
error: 3
|
||||
};
|
||||
const INSPECTOR_LEVELS = [];
|
||||
INSPECTOR_LEVELS[LOG_LEVELS.trace] = 'debug';
|
||||
INSPECTOR_LEVELS[LOG_LEVELS.info] = 'log';
|
||||
INSPECTOR_LEVELS[LOG_LEVELS.warn] = 'warning';
|
||||
INSPECTOR_LEVELS[LOG_LEVELS.error] = 'error';
|
||||
|
||||
// Strip the inner function in getNativeLogFunction(), if in dev also
|
||||
// strip method printing to originalConsole.
|
||||
const INSPECTOR_FRAMES_TO_SKIP = __DEV__ ? 2 : 1;
|
||||
|
||||
function setupConsole(global) {
|
||||
if (!global.nativeLoggingHook) {
|
||||
return;
|
||||
}
|
||||
|
||||
function getNativeLogFunction(level) {
|
||||
return function() {
|
||||
let str;
|
||||
if (arguments.length === 1 && typeof arguments[0] === 'string') {
|
||||
str = arguments[0];
|
||||
} else {
|
||||
str = Array.prototype.map.call(arguments, function(arg) {
|
||||
return inspect(arg, {depth: 10});
|
||||
}).join(', ');
|
||||
}
|
||||
|
||||
let logLevel = level;
|
||||
if (str.slice(0, 9) === 'Warning: ' && logLevel >= LOG_LEVELS.error) {
|
||||
// React warnings use console.error so that a stack trace is shown,
|
||||
// but we don't (currently) want these to show a redbox
|
||||
// (Note: Logic duplicated in ExceptionsManager.js.)
|
||||
logLevel = LOG_LEVELS.warn;
|
||||
}
|
||||
if (global.__inspectorLog) {
|
||||
global.__inspectorLog(
|
||||
INSPECTOR_LEVELS[logLevel],
|
||||
str,
|
||||
[].slice.call(arguments),
|
||||
INSPECTOR_FRAMES_TO_SKIP);
|
||||
}
|
||||
global.nativeLoggingHook(str, logLevel);
|
||||
};
|
||||
}
|
||||
|
||||
function repeat(element, n) {
|
||||
return Array.apply(null, Array(n)).map(function() { return element; });
|
||||
};
|
||||
|
||||
function consoleTablePolyfill(rows) {
|
||||
// convert object -> array
|
||||
if (!Array.isArray(rows)) {
|
||||
var data = rows;
|
||||
rows = [];
|
||||
for (var key in data) {
|
||||
if (data.hasOwnProperty(key)) {
|
||||
var row = data[key];
|
||||
row[OBJECT_COLUMN_NAME] = key;
|
||||
rows.push(row);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (rows.length === 0) {
|
||||
global.nativeLoggingHook('', LOG_LEVELS.info);
|
||||
return;
|
||||
}
|
||||
|
||||
var columns = Object.keys(rows[0]).sort();
|
||||
var stringRows = [];
|
||||
var columnWidths = [];
|
||||
|
||||
// Convert each cell to a string. Also
|
||||
// figure out max cell width for each column
|
||||
columns.forEach(function(k, i) {
|
||||
columnWidths[i] = k.length;
|
||||
for (var j = 0; j < rows.length; j++) {
|
||||
var cellStr = (rows[j][k] || '?').toString();
|
||||
stringRows[j] = stringRows[j] || [];
|
||||
stringRows[j][i] = cellStr;
|
||||
columnWidths[i] = Math.max(columnWidths[i], cellStr.length);
|
||||
}
|
||||
});
|
||||
|
||||
// Join all elements in the row into a single string with | separators
|
||||
// (appends extra spaces to each cell to make separators | alligned)
|
||||
function joinRow(row, space) {
|
||||
var cells = row.map(function(cell, i) {
|
||||
var extraSpaces = repeat(' ', columnWidths[i] - cell.length).join('');
|
||||
return cell + extraSpaces;
|
||||
});
|
||||
space = space || ' ';
|
||||
return cells.join(space + '|' + space);
|
||||
};
|
||||
|
||||
var separators = columnWidths.map(function(columnWidth) {
|
||||
return repeat('-', columnWidth).join('');
|
||||
});
|
||||
var separatorRow = joinRow(separators, '-');
|
||||
var header = joinRow(columns);
|
||||
var table = [header, separatorRow];
|
||||
|
||||
for (var i = 0; i < rows.length; i++) {
|
||||
table.push(joinRow(stringRows[i]));
|
||||
}
|
||||
|
||||
// Notice extra empty line at the beginning.
|
||||
// Native logging hook adds "RCTLog >" at the front of every
|
||||
// logged string, which would shift the header and screw up
|
||||
// the table
|
||||
global.nativeLoggingHook('\n' + table.join('\n'), LOG_LEVELS.info);
|
||||
}
|
||||
|
||||
// Preserve the original `console` as `originalConsole`
|
||||
var originalConsole = global.console;
|
||||
var descriptor = Object.getOwnPropertyDescriptor(global, 'console');
|
||||
if (descriptor) {
|
||||
Object.defineProperty(global, 'originalConsole', descriptor);
|
||||
}
|
||||
|
||||
global.console = {
|
||||
error: getNativeLogFunction(LOG_LEVELS.error),
|
||||
info: getNativeLogFunction(LOG_LEVELS.info),
|
||||
log: getNativeLogFunction(LOG_LEVELS.info),
|
||||
warn: getNativeLogFunction(LOG_LEVELS.warn),
|
||||
trace: getNativeLogFunction(LOG_LEVELS.trace),
|
||||
debug: getNativeLogFunction(LOG_LEVELS.trace),
|
||||
table: consoleTablePolyfill
|
||||
};
|
||||
|
||||
// If available, also call the original `console` method since that is
|
||||
// sometimes useful. Ex: on OS X, this will let you see rich output in
|
||||
// the Safari Web Inspector console.
|
||||
if (__DEV__ && originalConsole) {
|
||||
Object.keys(console).forEach(methodName => {
|
||||
var reactNativeMethod = console[methodName];
|
||||
if (originalConsole[methodName]) {
|
||||
console[methodName] = function() {
|
||||
originalConsole[methodName](...arguments);
|
||||
reactNativeMethod.apply(console, arguments);
|
||||
};
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof module !== 'undefined') {
|
||||
module.exports = setupConsole;
|
||||
} else {
|
||||
setupConsole(global);
|
||||
}
|
|
@ -1,90 +0,0 @@
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @polyfill
 */

/* eslint-disable strict */

let _inGuard = 0;

/**
 * This is the error handler that is called when we encounter an exception
 * when loading a module. This will report any errors encountered before
 * ExceptionsManager is configured.
 */
let _globalHandler = function onError(e) {
  throw e;
};

/**
 * The particular require runtime that we are using looks for a global
 * `ErrorUtils` object and if it exists, then it requires modules with the
 * error handler specified via ErrorUtils.setGlobalHandler by calling the
 * require function with applyWithGuard. Since the require module is loaded
 * before any of the modules, this ErrorUtils must be defined (and the handler
 * set) globally before requiring anything.
 */
const ErrorUtils = {
  setGlobalHandler(fun) {
    _globalHandler = fun;
  },
  getGlobalHandler() {
    return _globalHandler;
  },
  reportError(error) {
    _globalHandler && _globalHandler(error);
  },
  reportFatalError(error) {
    _globalHandler && _globalHandler(error, true);
  },
  applyWithGuard(fun, context, args) {
    try {
      _inGuard++;
      return fun.apply(context, args);
    } catch (e) {
      ErrorUtils.reportError(e);
    } finally {
      _inGuard--;
    }
    return null;
  },
  applyWithGuardIfNeeded(fun, context, args) {
    if (ErrorUtils.inGuard()) {
      return fun.apply(context, args);
    } else {
      ErrorUtils.applyWithGuard(fun, context, args);
    }
    return null;
  },
  inGuard() {
    return _inGuard;
  },
  guard(fun, name, context) {
    if (typeof fun !== 'function') {
      console.warn('A function must be passed to ErrorUtils.guard, got ', fun);
      return null;
    }
    name = name || fun.name || '<generated guard>';
    function guarded() {
      return (
        ErrorUtils.applyWithGuard(
          fun,
          context || this,
          arguments,
          null,
          name
        )
      );
    }

    return guarded;
  },
};

global.ErrorUtils = ErrorUtils;
@ -1,18 +0,0 @@
/**
 * Copyright (c) 2013-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @polyfill
 */

/* eslint-disable strict */

global.__DEV__ = false;

global.__BUNDLE_START_TIME__ = global.nativePerformanceNow
  ? global.nativePerformanceNow()
  : Date.now();
@ -1,18 +0,0 @@
/**
 * Copyright (c) 2013-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @polyfill
 */

/* eslint-disable strict */

global.__DEV__ = true;

global.__BUNDLE_START_TIME__ = global.nativePerformanceNow
  ? global.nativePerformanceNow()
  : Date.now();
@ -1,292 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2013-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @polyfill
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
declare var __DEV__: boolean;
|
||||
|
||||
type DependencyMap = Array<ModuleID>;
|
||||
type Exports = any;
|
||||
type FactoryFn = (
|
||||
global: Object,
|
||||
require: RequireFn,
|
||||
moduleObject: {exports: {}},
|
||||
exports: {},
|
||||
dependencyMap: ?DependencyMap,
|
||||
) => void;
|
||||
type HotModuleReloadingAcceptFn = Function;
|
||||
type HotModuleReloadingData = {|
|
||||
acceptCallback: ?HotModuleReloadingAcceptFn,
|
||||
accept: (callback: HotModuleReloadingAcceptFn) => void,
|
||||
|};
|
||||
type Module = {
|
||||
exports: Exports,
|
||||
hot?: HotModuleReloadingData,
|
||||
};
|
||||
type ModuleID = number;
|
||||
type ModuleDefinition = {|
|
||||
dependencyMap: ?DependencyMap,
|
||||
exports: Exports,
|
||||
factory: FactoryFn,
|
||||
hasError: boolean,
|
||||
error?: any,
|
||||
hot?: HotModuleReloadingData,
|
||||
isInitialized: boolean,
|
||||
verboseName?: string,
|
||||
|};
|
||||
type ModuleMap =
|
||||
{[key: ModuleID]: (ModuleDefinition)};
|
||||
type RequireFn = (id: ModuleID | VerboseModuleNameForDev) => Exports;
|
||||
type VerboseModuleNameForDev = string;
|
||||
|
||||
global.require = require;
|
||||
global.__d = define;
|
||||
|
||||
const modules: ModuleMap = Object.create(null);
|
||||
if (__DEV__) {
|
||||
var verboseNamesToModuleIds: {[key: string]: number} = Object.create(null);
|
||||
}
|
||||
|
||||
function define(
|
||||
factory: FactoryFn,
|
||||
moduleId: number,
|
||||
dependencyMap?: DependencyMap,
|
||||
) {
|
||||
if (moduleId in modules) {
|
||||
// prevent repeated calls to `global.nativeRequire` to overwrite modules
|
||||
// that are already loaded
|
||||
return;
|
||||
}
|
||||
modules[moduleId] = {
|
||||
dependencyMap,
|
||||
exports: undefined,
|
||||
factory,
|
||||
hasError: false,
|
||||
isInitialized: false,
|
||||
};
|
||||
if (__DEV__) {
|
||||
// HMR
|
||||
modules[moduleId].hot = createHotReloadingObject();
|
||||
|
||||
// DEBUGGABLE MODULES NAMES
|
||||
// we take `verboseName` from `arguments` to avoid an unused named parameter
|
||||
// in `define` in production.
|
||||
const verboseName: string | void = arguments[3];
|
||||
if (verboseName) {
|
||||
modules[moduleId].verboseName = verboseName;
|
||||
verboseNamesToModuleIds[verboseName] = moduleId;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function require(moduleId: ModuleID | VerboseModuleNameForDev) {
|
||||
if (__DEV__ && typeof moduleId === 'string') {
|
||||
const verboseName = moduleId;
|
||||
moduleId = verboseNamesToModuleIds[moduleId];
|
||||
if (moduleId == null) {
|
||||
throw new Error(`Unknown named module: '${verboseName}'`);
|
||||
} else {
|
||||
console.warn(
|
||||
`Requiring module '${verboseName}' by name is only supported for ` +
|
||||
'debugging purposes and will BREAK IN PRODUCTION!'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
//$FlowFixMe: at this point we know that moduleId is a number
|
||||
const moduleIdReallyIsNumber: number = moduleId;
|
||||
const module = modules[moduleIdReallyIsNumber];
|
||||
return module && module.isInitialized
|
||||
? module.exports
|
||||
: guardedLoadModule(moduleIdReallyIsNumber, module);
|
||||
}
|
||||
|
||||
let inGuard = false;
|
||||
function guardedLoadModule(moduleId: ModuleID, module) {
|
||||
if (!inGuard && global.ErrorUtils) {
|
||||
inGuard = true;
|
||||
let returnValue;
|
||||
try {
|
||||
returnValue = loadModuleImplementation(moduleId, module);
|
||||
} catch (e) {
|
||||
global.ErrorUtils.reportFatalError(e);
|
||||
}
|
||||
inGuard = false;
|
||||
return returnValue;
|
||||
} else {
|
||||
return loadModuleImplementation(moduleId, module);
|
||||
}
|
||||
}
|
||||
|
||||
function loadModuleImplementation(moduleId, module) {
|
||||
const nativeRequire = global.nativeRequire;
|
||||
if (!module && nativeRequire) {
|
||||
nativeRequire(moduleId);
|
||||
module = modules[moduleId];
|
||||
}
|
||||
|
||||
if (!module) {
|
||||
throw unknownModuleError(moduleId);
|
||||
}
|
||||
|
||||
if (module.hasError) {
|
||||
throw moduleThrewError(moduleId, module.error);
|
||||
}
|
||||
|
||||
// `require` calls int the require polyfill itself are not analyzed and
|
||||
// replaced so that they use numeric module IDs.
|
||||
// The systrace module will expose itself on the require function so that
|
||||
// it can be used here.
|
||||
// TODO(davidaurelio) Scan polyfills for dependencies, too (t9759686)
|
||||
if (__DEV__) {
|
||||
var {Systrace} = require;
|
||||
}
|
||||
|
||||
// We must optimistically mark module as initialized before running the
|
||||
// factory to keep any require cycles inside the factory from causing an
|
||||
// infinite require loop.
|
||||
module.isInitialized = true;
|
||||
const exports = module.exports = {};
|
||||
const {factory, dependencyMap} = module;
|
||||
try {
|
||||
if (__DEV__) {
|
||||
// $FlowFixMe: we know that __DEV__ is const and `Systrace` exists
|
||||
Systrace.beginEvent('JS_require_' + (module.verboseName || moduleId));
|
||||
}
|
||||
|
||||
const moduleObject: Module = {exports};
|
||||
if (__DEV__ && module.hot) {
|
||||
moduleObject.hot = module.hot;
|
||||
}
|
||||
|
||||
// keep args in sync with with defineModuleCode in
|
||||
// packager/src//Resolver/index.js
|
||||
// and packager/src//ModuleGraph/worker.js
|
||||
factory(global, require, moduleObject, exports, dependencyMap);
|
||||
|
||||
// avoid removing factory in DEV mode as it breaks HMR
|
||||
if (!__DEV__) {
|
||||
// $FlowFixMe: This is only sound because we never access `factory` again
|
||||
module.factory = undefined;
|
||||
module.dependencyMap = undefined;
|
||||
}
|
||||
|
||||
if (__DEV__) {
|
||||
// $FlowFixMe: we know that __DEV__ is const and `Systrace` exists
|
||||
Systrace.endEvent();
|
||||
}
|
||||
return (module.exports = moduleObject.exports);
|
||||
} catch (e) {
|
||||
module.hasError = true;
|
||||
module.error = e;
|
||||
module.isInitialized = false;
|
||||
module.exports = undefined;
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
function unknownModuleError(id) {
|
||||
let message = 'Requiring unknown module "' + id + '".';
|
||||
if (__DEV__) {
|
||||
message +=
|
||||
'If you are sure the module is there, try restarting the packager. ' +
|
||||
'You may also want to run `npm install`, or `yarn` (depending on your environment).';
|
||||
}
|
||||
return Error(message);
|
||||
}
|
||||
|
||||
function moduleThrewError(id, error: any) {
|
||||
const displayName = __DEV__ && modules[id] && modules[id].verboseName || id;
|
||||
return Error('Requiring module "' + displayName + '", which threw an exception: ' + error);
|
||||
}
|
||||
|
||||
if (__DEV__) {
|
||||
require.Systrace = {beginEvent: () => {}, endEvent: () => {}};
|
||||
|
||||
// HOT MODULE RELOADING
|
||||
var createHotReloadingObject = function() {
|
||||
const hot: HotModuleReloadingData = {
|
||||
acceptCallback: null,
|
||||
accept: callback => { hot.acceptCallback = callback; },
|
||||
};
|
||||
return hot;
|
||||
};
|
||||
|
||||
const acceptAll = function(
|
||||
dependentModules,
|
||||
inverseDependencies,
|
||||
) {
|
||||
if (!dependentModules || dependentModules.length === 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const notAccepted = dependentModules.filter(
|
||||
module => !accept(module, /*factory*/ undefined, inverseDependencies));
|
||||
|
||||
const parents = [];
|
||||
for (let i = 0; i < notAccepted.length; i++) {
|
||||
// if the module has no parents then the change cannot be hot loaded
|
||||
if (inverseDependencies[notAccepted[i]].length === 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
parents.push(...inverseDependencies[notAccepted[i]]);
|
||||
}
|
||||
|
||||
return acceptAll(parents, inverseDependencies);
|
||||
};
|
||||
|
||||
const accept = function(
|
||||
id: ModuleID,
|
||||
factory?: FactoryFn,
|
||||
inverseDependencies: {[key: ModuleID]: Array<ModuleID>},
|
||||
) {
|
||||
const mod = modules[id];
|
||||
|
||||
if (!mod && factory) { // new modules need a factory
|
||||
define(factory, id);
|
||||
return true; // new modules don't need to be accepted
|
||||
}
|
||||
|
||||
const {hot} = mod;
|
||||
if (!hot) {
|
||||
console.warn(
|
||||
'Cannot accept module because Hot Module Replacement ' +
|
||||
'API was not installed.'
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
// replace and initialize factory
|
||||
if (factory) {
|
||||
mod.factory = factory;
|
||||
}
|
||||
mod.hasError = false;
|
||||
mod.isInitialized = false;
|
||||
require(id);
|
||||
|
||||
if (hot.acceptCallback) {
|
||||
hot.acceptCallback();
|
||||
return true;
|
||||
} else {
|
||||
// need to have inverseDependencies to bubble up accept
|
||||
if (!inverseDependencies) {
|
||||
throw new Error('Undefined `inverseDependencies`');
|
||||
}
|
||||
|
||||
// accept parent modules recursively up until all siblings are accepted
|
||||
return acceptAll(inverseDependencies[id], inverseDependencies);
|
||||
}
|
||||
};
|
||||
|
||||
global.__accept = accept;
|
||||
}
|
|
@ -1,84 +0,0 @@
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */
'use strict';

const CRLF = '\r\n';
const BOUNDARY = '3beqjf3apnqeu3h5jqorms4i';

class MultipartResponse {
  static wrap(req, res) {
    if (acceptsMultipartResponse(req)) {
      return new MultipartResponse(res);
    }
    // Ugly hack, ideally wrap function should always return a proxy
    // object with the same interface
    res.writeChunk = () => {}; // noop
    return res;
  }

  constructor(res) {
    this.res = res;
    this.headers = {};

    res.writeHead(200, {
      'Content-Type': `multipart/mixed; boundary="${BOUNDARY}"`,
    });
    res.write('If you are seeing this, your client does not support multipart response');
  }

  writeChunk(headers, data, isLast = false) {
    let chunk = `${CRLF}--${BOUNDARY}${CRLF}`;
    if (headers) {
      chunk += MultipartResponse.serializeHeaders(headers) + CRLF + CRLF;
    }

    if (data) {
      chunk += data;
    }

    if (isLast) {
      chunk += `${CRLF}--${BOUNDARY}--${CRLF}`;
    }

    this.res.write(chunk);
  }

  writeHead(status, headers) {
    // We can't actually change the response HTTP status code
    // because the headers have already been sent
    this.setHeader('X-Http-Status', status);
    if (!headers) {
      return;
    }
    for (const key in headers) {
      this.setHeader(key, headers[key]);
    }
  }

  setHeader(name, value) {
    this.headers[name] = value;
  }

  end(data) {
    this.writeChunk(this.headers, data, true);
    this.res.end();
  }

  static serializeHeaders(headers) {
    return Object.keys(headers)
      .map(key => `${key}: ${headers[key]}`)
      .join(CRLF);
  }
}

function acceptsMultipartResponse(req) {
  return req.headers && req.headers.accept === 'multipart/mixed';
}

module.exports = MultipartResponse;
@ -1,149 +0,0 @@
|
|||
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
jest.dontMock('../MultipartResponse');
|
||||
|
||||
const MultipartResponse = require('../MultipartResponse');
|
||||
|
||||
describe('MultipartResponse', () => {
|
||||
it('forwards calls to response', () => {
|
||||
const nreq = mockNodeRequest({accept: 'text/html'});
|
||||
const nres = mockNodeResponse();
|
||||
    const res = MultipartResponse.wrap(nreq, nres);

    expect(res).toBe(nres);

    res.writeChunk({}, 'foo');
    expect(nres.write).not.toBeCalled();
  });

  it('writes multipart response', () => {
    const nreq = mockNodeRequest({accept: 'multipart/mixed'});
    const nres = mockNodeResponse();
    const res = MultipartResponse.wrap(nreq, nres);

    expect(res).not.toBe(nres);

    res.setHeader('Result-Header-1', 1);
    res.writeChunk({foo: 'bar'}, 'first chunk');
    res.writeChunk({test: 2}, 'second chunk');
    res.writeChunk(null, 'empty headers third chunk');
    res.setHeader('Result-Header-2', 2);
    res.end('Hello, world!');

    expect(nres.toString()).toEqual([
      'HTTP/1.1 200',
      'Content-Type: multipart/mixed; boundary="3beqjf3apnqeu3h5jqorms4i"',
      '',
      'If you are seeing this, your client does not support multipart response',
      '--3beqjf3apnqeu3h5jqorms4i',
      'foo: bar',
      '',
      'first chunk',
      '--3beqjf3apnqeu3h5jqorms4i',
      'test: 2',
      '',
      'second chunk',
      '--3beqjf3apnqeu3h5jqorms4i',
      'empty headers third chunk',
      '--3beqjf3apnqeu3h5jqorms4i',
      'Result-Header-1: 1',
      'Result-Header-2: 2',
      '',
      'Hello, world!',
      '--3beqjf3apnqeu3h5jqorms4i--',
      '',
    ].join('\r\n'));
  });

  it('sends status code as last chunk header', () => {
    const nreq = mockNodeRequest({accept: 'multipart/mixed'});
    const nres = mockNodeResponse();
    const res = MultipartResponse.wrap(nreq, nres);

    res.writeChunk({foo: 'bar'}, 'first chunk');
    res.writeHead(500, {
      'Content-Type': 'application/json; boundary="3beqjf3apnqeu3h5jqorms4i"',
    });
    res.end('{}');

    expect(nres.toString()).toEqual([
      'HTTP/1.1 200',
      'Content-Type: multipart/mixed; boundary="3beqjf3apnqeu3h5jqorms4i"',
      '',
      'If you are seeing this, your client does not support multipart response',
      '--3beqjf3apnqeu3h5jqorms4i',
      'foo: bar',
      '',
      'first chunk',
      '--3beqjf3apnqeu3h5jqorms4i',
      'X-Http-Status: 500',
      'Content-Type: application/json; boundary="3beqjf3apnqeu3h5jqorms4i"',
      '',
      '{}',
      '--3beqjf3apnqeu3h5jqorms4i--',
      '',
    ].join('\r\n'));
  });

  it('supports empty responses', () => {
    const nreq = mockNodeRequest({accept: 'multipart/mixed'});
    const nres = mockNodeResponse();
    const res = MultipartResponse.wrap(nreq, nres);

    res.writeHead(304, {
      'Content-Type': 'application/json; boundary="3beqjf3apnqeu3h5jqorms4i"',
    });
    res.end();

    expect(nres.toString()).toEqual([
      'HTTP/1.1 200',
      'Content-Type: multipart/mixed; boundary="3beqjf3apnqeu3h5jqorms4i"',
      '',
      'If you are seeing this, your client does not support multipart response',
      '--3beqjf3apnqeu3h5jqorms4i',
      'X-Http-Status: 304',
      'Content-Type: application/json; boundary="3beqjf3apnqeu3h5jqorms4i"',
      '',
      '',
      '--3beqjf3apnqeu3h5jqorms4i--',
      '',
    ].join('\r\n'));
  });
});

function mockNodeRequest(headers = {}) {
  return {headers};
}

function mockNodeResponse() {
  let status = 200;
  let headers = {};
  let body = '';
  return {
    writeHead: jest.fn((st, hdrs) => {
      status = st;
      headers = {...headers, ...hdrs};
    }),
    setHeader: jest.fn((key, val) => { headers[key] = val; }),
    write: jest.fn(data => { body += data; }),
    end: jest.fn(data => { body += (data || ''); }),

    // For testing only
    toString() {
      return [
        `HTTP/1.1 ${status}`,
        MultipartResponse.serializeHeaders(headers),
        '',
        body,
      ].join('\r\n');
    },
  };
}
@@ -1,542 +0,0 @@
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
jest.disableAutomock();
|
||||
|
||||
jest.mock('../../worker-farm', () => () => () => {})
|
||||
.mock('worker-farm', () => () => () => {})
|
||||
.mock('timers', () => ({setImmediate: fn => setTimeout(fn, 0)}))
|
||||
.mock('uglify-js')
|
||||
.mock('crypto')
|
||||
.mock(
|
||||
'../symbolicate',
|
||||
() => ({createWorker: jest.fn().mockReturnValue(jest.fn())}),
|
||||
)
|
||||
.mock('../../Bundler')
|
||||
.mock('../../AssetServer')
|
||||
.mock('../../node-haste/DependencyGraph')
|
||||
.mock('../../Logger')
|
||||
.mock('../../lib/GlobalTransformCache');
|
||||
|
||||
describe('processRequest', () => {
|
||||
let Bundler, Server, AssetServer, Promise, symbolicate;
|
||||
beforeEach(() => {
|
||||
jest.resetModules();
|
||||
Bundler = require('../../Bundler');
|
||||
Server = require('../');
|
||||
AssetServer = require('../../AssetServer');
|
||||
Promise = require('promise');
|
||||
symbolicate = require('../symbolicate');
|
||||
});
|
||||
|
||||
let server;
|
||||
|
||||
const options = {
|
||||
projectRoots: ['root'],
|
||||
blacklistRE: null,
|
||||
cacheVersion: null,
|
||||
polyfillModuleNames: null,
|
||||
reporter: require('../../lib/reporting').nullReporter,
|
||||
};
|
||||
|
||||
const makeRequest = (reqHandler, requrl, reqOptions) => new Promise(resolve =>
|
||||
reqHandler(
|
||||
{url: requrl, headers:{}, ...reqOptions},
|
||||
{
|
||||
statusCode: 200,
|
||||
headers: {},
|
||||
getHeader(header) { return this.headers[header]; },
|
||||
setHeader(header, value) { this.headers[header] = value; },
|
||||
writeHead(statusCode) { this.statusCode = statusCode; },
|
||||
end(body) {
|
||||
this.body = body;
|
||||
resolve(this);
|
||||
},
|
||||
},
|
||||
{next: () => {}},
|
||||
)
|
||||
);
|
||||
|
||||
const invalidatorFunc = jest.fn();
|
||||
let requestHandler;
|
||||
|
||||
beforeEach(() => {
|
||||
Bundler.prototype.bundle = jest.fn(() =>
|
||||
Promise.resolve({
|
||||
getModules: () => [],
|
||||
getSource: () => 'this is the source',
|
||||
getSourceMap: () => ({version: 3}),
|
||||
getSourceMapString: () => 'this is the source map',
|
||||
getEtag: () => 'this is an etag',
|
||||
}));
|
||||
|
||||
Bundler.prototype.invalidateFile = invalidatorFunc;
|
||||
Bundler.prototype.getResolver =
|
||||
jest.fn().mockReturnValue(Promise.resolve({
|
||||
getDependencyGraph: jest.fn().mockReturnValue({
|
||||
getHasteMap: jest.fn().mockReturnValue({on: jest.fn()}),
|
||||
load: jest.fn(() => Promise.resolve()),
|
||||
}),
|
||||
}));
|
||||
|
||||
server = new Server(options);
|
||||
requestHandler = server.processRequest.bind(server);
|
||||
});
|
||||
|
||||
it('returns JS bundle source on request of *.bundle', () => {
|
||||
return makeRequest(
|
||||
requestHandler,
|
||||
'mybundle.bundle?runModule=true',
|
||||
null
|
||||
).then(response =>
|
||||
expect(response.body).toEqual('this is the source')
|
||||
);
|
||||
});
|
||||
|
||||
it('returns JS bundle source on request of *.bundle (compat)', () => {
|
||||
return makeRequest(
|
||||
requestHandler,
|
||||
'mybundle.runModule.bundle'
|
||||
).then(response =>
|
||||
expect(response.body).toEqual('this is the source')
|
||||
);
|
||||
});
|
||||
|
||||
it('returns ETag header on request of *.bundle', () => {
|
||||
return makeRequest(
|
||||
requestHandler,
|
||||
'mybundle.bundle?runModule=true'
|
||||
).then(response => {
|
||||
expect(response.getHeader('ETag')).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
it('returns 304 on request of *.bundle when if-none-match equals the ETag', () => {
|
||||
return makeRequest(
|
||||
requestHandler,
|
||||
'mybundle.bundle?runModule=true',
|
||||
{headers : {'if-none-match' : 'this is an etag'}}
|
||||
).then(response => {
|
||||
expect(response.statusCode).toEqual(304);
|
||||
});
|
||||
});
|
||||
|
||||
it('returns sourcemap on request of *.map', () => {
|
||||
return makeRequest(
|
||||
requestHandler,
|
||||
'mybundle.map?runModule=true'
|
||||
).then(response =>
|
||||
expect(response.body).toEqual('this is the source map')
|
||||
);
|
||||
});
|
||||
|
||||
it('works with .ios.js extension', () => {
|
||||
return makeRequest(
|
||||
requestHandler,
|
||||
'index.ios.includeRequire.bundle'
|
||||
).then(response => {
|
||||
expect(response.body).toEqual('this is the source');
|
||||
expect(Bundler.prototype.bundle).toBeCalledWith({
|
||||
assetPlugins: [],
|
||||
dev: true,
|
||||
entryFile: 'index.ios.js',
|
||||
entryModuleOnly: false,
|
||||
generateSourceMaps: false,
|
||||
hot: false,
|
||||
inlineSourceMap: false,
|
||||
isolateModuleIDs: false,
|
||||
minify: false,
|
||||
onProgress: jasmine.any(Function),
|
||||
platform: null,
|
||||
resolutionResponse: null,
|
||||
runBeforeMainModule: ['InitializeCore'],
|
||||
runModule: true,
|
||||
sourceMapUrl: 'index.ios.includeRequire.map',
|
||||
unbundle: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('passes in the platform param', function() {
|
||||
return makeRequest(
|
||||
requestHandler,
|
||||
'index.bundle?platform=ios'
|
||||
).then(function(response) {
|
||||
expect(response.body).toEqual('this is the source');
|
||||
expect(Bundler.prototype.bundle).toBeCalledWith({
|
||||
assetPlugins: [],
|
||||
dev: true,
|
||||
entryFile: 'index.js',
|
||||
entryModuleOnly: false,
|
||||
generateSourceMaps: false,
|
||||
hot: false,
|
||||
inlineSourceMap: false,
|
||||
isolateModuleIDs: false,
|
||||
minify: false,
|
||||
onProgress: jasmine.any(Function),
|
||||
platform: 'ios',
|
||||
resolutionResponse: null,
|
||||
runBeforeMainModule: ['InitializeCore'],
|
||||
runModule: true,
|
||||
sourceMapUrl: 'index.map?platform=ios',
|
||||
unbundle: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('passes in the assetPlugin param', function() {
|
||||
return makeRequest(
|
||||
requestHandler,
|
||||
'index.bundle?assetPlugin=assetPlugin1&assetPlugin=assetPlugin2'
|
||||
).then(function(response) {
|
||||
expect(response.body).toEqual('this is the source');
|
||||
expect(Bundler.prototype.bundle).toBeCalledWith({
|
||||
assetPlugins: ['assetPlugin1', 'assetPlugin2'],
|
||||
dev: true,
|
||||
entryFile: 'index.js',
|
||||
entryModuleOnly: false,
|
||||
generateSourceMaps: false,
|
||||
hot: false,
|
||||
inlineSourceMap: false,
|
||||
isolateModuleIDs: false,
|
||||
minify: false,
|
||||
onProgress: jasmine.any(Function),
|
||||
platform: null,
|
||||
resolutionResponse: null,
|
||||
runBeforeMainModule: ['InitializeCore'],
|
||||
runModule: true,
|
||||
sourceMapUrl: 'index.map?assetPlugin=assetPlugin1&assetPlugin=assetPlugin2',
|
||||
unbundle: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('file changes', () => {
|
||||
|
||||
it('does not rebuild the bundles that contain a file when that file is changed', () => {
|
||||
const bundleFunc = jest.fn();
|
||||
bundleFunc
|
||||
.mockReturnValueOnce(
|
||||
Promise.resolve({
|
||||
getModules: () => [],
|
||||
getSource: () => 'this is the first source',
|
||||
getSourceMap: () => {},
|
||||
getSourceMapString: () => 'this is the source map',
|
||||
getEtag: () => () => 'this is an etag',
|
||||
})
|
||||
)
|
||||
.mockReturnValue(
|
||||
Promise.resolve({
|
||||
getModules: () => [],
|
||||
getSource: () => 'this is the rebuilt source',
|
||||
getSourceMap: () => {},
|
||||
getSourceMapString: () => 'this is the source map',
|
||||
getEtag: () => () => 'this is an etag',
|
||||
})
|
||||
);
|
||||
|
||||
Bundler.prototype.bundle = bundleFunc;
|
||||
|
||||
server = new Server(options);
|
||||
|
||||
requestHandler = server.processRequest.bind(server);
|
||||
|
||||
makeRequest(requestHandler, 'mybundle.bundle?runModule=true')
|
||||
.done(response => {
|
||||
expect(response.body).toEqual('this is the first source');
|
||||
expect(bundleFunc.mock.calls.length).toBe(1);
|
||||
});
|
||||
|
||||
jest.runAllTicks();
|
||||
|
||||
server.onFileChange('all', options.projectRoots[0] + 'path/file.js');
|
||||
jest.runAllTimers();
|
||||
jest.runAllTicks();
|
||||
|
||||
expect(bundleFunc.mock.calls.length).toBe(1);
|
||||
|
||||
makeRequest(requestHandler, 'mybundle.bundle?runModule=true')
|
||||
.done(response =>
|
||||
expect(response.body).toEqual('this is the rebuilt source')
|
||||
);
|
||||
jest.runAllTicks();
|
||||
});
|
||||
|
||||
it(
|
||||
'does not rebuild the bundles that contain a file ' +
|
||||
'when that file is changed, even when hot loading is enabled',
|
||||
() => {
|
||||
const bundleFunc = jest.fn();
|
||||
bundleFunc
|
||||
.mockReturnValueOnce(
|
||||
Promise.resolve({
|
||||
getModules: () => [],
|
||||
getSource: () => 'this is the first source',
|
||||
getSourceMap: () => {},
|
||||
getSourceMapString: () => 'this is the source map',
|
||||
getEtag: () => () => 'this is an etag',
|
||||
})
|
||||
)
|
||||
.mockReturnValue(
|
||||
Promise.resolve({
|
||||
getModules: () => [],
|
||||
getSource: () => 'this is the rebuilt source',
|
||||
getSourceMap: () => {},
|
||||
getSourceMapString: () => 'this is the source map',
|
||||
getEtag: () => () => 'this is an etag',
|
||||
})
|
||||
);
|
||||
|
||||
Bundler.prototype.bundle = bundleFunc;
|
||||
|
||||
server = new Server(options);
|
||||
server.setHMRFileChangeListener(() => {});
|
||||
|
||||
requestHandler = server.processRequest.bind(server);
|
||||
|
||||
makeRequest(requestHandler, 'mybundle.bundle?runModule=true')
|
||||
.done(response => {
|
||||
expect(response.body).toEqual('this is the first source');
|
||||
expect(bundleFunc.mock.calls.length).toBe(1);
|
||||
});
|
||||
|
||||
jest.runAllTicks();
|
||||
|
||||
server.onFileChange('all', options.projectRoots[0] + 'path/file.js');
|
||||
jest.runAllTimers();
|
||||
jest.runAllTicks();
|
||||
|
||||
expect(bundleFunc.mock.calls.length).toBe(1);
|
||||
server.setHMRFileChangeListener(null);
|
||||
|
||||
makeRequest(requestHandler, 'mybundle.bundle?runModule=true')
|
||||
.done(response => {
|
||||
expect(response.body).toEqual('this is the rebuilt source');
|
||||
expect(bundleFunc.mock.calls.length).toBe(2);
|
||||
});
|
||||
jest.runAllTicks();
|
||||
});
|
||||
});
|
||||
|
||||
describe('/onchange endpoint', () => {
|
||||
let EventEmitter;
|
||||
let req;
|
||||
let res;
|
||||
|
||||
beforeEach(() => {
|
||||
EventEmitter = require.requireActual('events').EventEmitter;
|
||||
req = scaffoldReq(new EventEmitter());
|
||||
req.url = '/onchange';
|
||||
res = {
|
||||
writeHead: jest.fn(),
|
||||
end: jest.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
it('should hold on to request and inform on change', () => {
|
||||
server.processRequest(req, res);
|
||||
server.onFileChange('all', options.projectRoots[0] + 'path/file.js');
|
||||
jest.runAllTimers();
|
||||
expect(res.end).toBeCalledWith(JSON.stringify({changed: true}));
|
||||
});
|
||||
|
||||
it('should not inform changes on disconnected clients', () => {
|
||||
server.processRequest(req, res);
|
||||
req.emit('close');
|
||||
jest.runAllTimers();
|
||||
server.onFileChange('all', options.projectRoots[0] + 'path/file.js');
|
||||
jest.runAllTimers();
|
||||
expect(res.end).not.toBeCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('/assets endpoint', () => {
|
||||
it('should serve simple case', () => {
|
||||
const req = scaffoldReq({url: '/assets/imgs/a.png'});
|
||||
const res = {end: jest.fn(), setHeader: jest.fn()};
|
||||
|
||||
AssetServer.prototype.get.mockImplementation(() => Promise.resolve('i am image'));
|
||||
|
||||
server.processRequest(req, res);
|
||||
jest.runAllTimers();
|
||||
expect(res.end).toBeCalledWith('i am image');
|
||||
});
|
||||
|
||||
it('should parse the platform option', () => {
|
||||
const req = scaffoldReq({url: '/assets/imgs/a.png?platform=ios'});
|
||||
const res = {end: jest.fn(), setHeader: jest.fn()};
|
||||
|
||||
AssetServer.prototype.get.mockImplementation(() => Promise.resolve('i am image'));
|
||||
|
||||
server.processRequest(req, res);
|
||||
jest.runAllTimers();
|
||||
expect(AssetServer.prototype.get).toBeCalledWith('imgs/a.png', 'ios');
|
||||
expect(res.end).toBeCalledWith('i am image');
|
||||
});
|
||||
|
||||
it('should serve range request', () => {
|
||||
const req = scaffoldReq({
|
||||
url: '/assets/imgs/a.png?platform=ios',
|
||||
headers: {range: 'bytes=0-3'},
|
||||
});
|
||||
const res = {end: jest.fn(), writeHead: jest.fn(), setHeader: jest.fn()};
|
||||
const mockData = 'i am image';
|
||||
|
||||
AssetServer.prototype.get.mockImplementation(() => Promise.resolve(mockData));
|
||||
|
||||
server.processRequest(req, res);
|
||||
jest.runAllTimers();
|
||||
expect(AssetServer.prototype.get).toBeCalledWith('imgs/a.png', 'ios');
|
||||
expect(res.end).toBeCalledWith(mockData.slice(0, 4));
|
||||
});
|
||||
|
||||
it('should serve assets files\'s name contain non-latin letter', () => {
|
||||
const req = scaffoldReq({url: '/assets/imgs/%E4%B8%BB%E9%A1%B5/logo.png'});
|
||||
const res = {end: jest.fn(), setHeader: jest.fn()};
|
||||
|
||||
AssetServer.prototype.get.mockImplementation(() => Promise.resolve('i am image'));
|
||||
|
||||
server.processRequest(req, res);
|
||||
jest.runAllTimers();
|
||||
expect(AssetServer.prototype.get).toBeCalledWith(
|
||||
'imgs/\u{4E3B}\u{9875}/logo.png',
|
||||
undefined
|
||||
);
|
||||
expect(res.end).toBeCalledWith('i am image');
|
||||
});
|
||||
});
|
||||
|
||||
describe('buildbundle(options)', () => {
|
||||
it('Calls the bundler with the correct args', () => {
|
||||
return server.buildBundle({
|
||||
...Server.DEFAULT_BUNDLE_OPTIONS,
|
||||
entryFile: 'foo file',
|
||||
}).then(() =>
|
||||
expect(Bundler.prototype.bundle).toBeCalledWith({
|
||||
assetPlugins: [],
|
||||
dev: true,
|
||||
entryFile: 'foo file',
|
||||
entryModuleOnly: false,
|
||||
generateSourceMaps: false,
|
||||
hot: false,
|
||||
inlineSourceMap: false,
|
||||
isolateModuleIDs: false,
|
||||
minify: false,
|
||||
onProgress: null,
|
||||
platform: undefined,
|
||||
resolutionResponse: null,
|
||||
runBeforeMainModule: ['InitializeCore'],
|
||||
runModule: true,
|
||||
sourceMapUrl: null,
|
||||
unbundle: false,
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('buildBundleFromUrl(options)', () => {
|
||||
it('Calls the bundler with the correct args', () => {
|
||||
return server.buildBundleFromUrl('/path/to/foo.bundle?dev=false&runModule=false')
|
||||
.then(() =>
|
||||
expect(Bundler.prototype.bundle).toBeCalledWith({
|
||||
assetPlugins: [],
|
||||
dev: false,
|
||||
entryFile: 'path/to/foo.js',
|
||||
entryModuleOnly: false,
|
||||
generateSourceMaps: true,
|
||||
hot: false,
|
||||
inlineSourceMap: false,
|
||||
isolateModuleIDs: false,
|
||||
minify: false,
|
||||
onProgress: null,
|
||||
platform: null,
|
||||
resolutionResponse: null,
|
||||
runBeforeMainModule: ['InitializeCore'],
|
||||
runModule: false,
|
||||
sourceMapUrl: '/path/to/foo.map?dev=false&runModule=false',
|
||||
unbundle: false,
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('/symbolicate endpoint', () => {
|
||||
let symbolicationWorker;
|
||||
beforeEach(() => {
|
||||
symbolicationWorker = symbolicate.createWorker();
|
||||
symbolicationWorker.mockReset();
|
||||
});
|
||||
|
||||
it('should symbolicate given stack trace', () => {
|
||||
const inputStack = [{
|
||||
file: 'http://foo.bundle?platform=ios',
|
||||
lineNumber: 2100,
|
||||
column: 44,
|
||||
customPropShouldBeLeftUnchanged: 'foo',
|
||||
}];
|
||||
const outputStack = [{
|
||||
source: 'foo.js',
|
||||
line: 21,
|
||||
column: 4,
|
||||
}];
|
||||
const body = JSON.stringify({stack: inputStack});
|
||||
|
||||
expect.assertions(2);
|
||||
symbolicationWorker.mockImplementation(stack => {
|
||||
expect(stack).toEqual(inputStack);
|
||||
return outputStack;
|
||||
});
|
||||
|
||||
return makeRequest(
|
||||
requestHandler,
|
||||
'/symbolicate',
|
||||
{rawBody: body},
|
||||
).then(response =>
|
||||
expect(JSON.parse(response.body)).toEqual({stack: outputStack}));
|
||||
});
|
||||
});
|
||||
|
||||
describe('/symbolicate handles errors', () => {
|
||||
it('should symbolicate given stack trace', () => {
|
||||
const body = 'clearly-not-json';
|
||||
console.error = jest.fn();
|
||||
|
||||
return makeRequest(
|
||||
requestHandler,
|
||||
'/symbolicate',
|
||||
{rawBody: body}
|
||||
).then(response => {
|
||||
expect(response.statusCode).toEqual(500);
|
||||
expect(JSON.parse(response.body)).toEqual({
|
||||
error: jasmine.any(String),
|
||||
});
|
||||
expect(console.error).toBeCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('_getOptionsFromUrl', () => {
|
||||
it('ignores protocol, host and port of the passed in URL', () => {
|
||||
const short = '/path/to/entry-file.js??platform=ios&dev=true&minify=false';
|
||||
const long = `http://localhost:8081${short}`;
|
||||
expect(server._getOptionsFromUrl(long))
|
||||
.toEqual(server._getOptionsFromUrl(short));
|
||||
});
|
||||
});
|
||||
|
||||
// ensures that vital properties exist on fake request objects
|
||||
function scaffoldReq(req) {
|
||||
if (!req.headers) {
|
||||
req.headers = {};
|
||||
}
|
||||
return req;
|
||||
}
|
||||
});
@@ -1,957 +0,0 @@
/**
|
||||
* Copyright (c) 2015-present, Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the BSD-style license found in the
|
||||
* LICENSE file in the root directory of this source tree. An additional grant
|
||||
* of patent rights can be found in the PATENTS file in the same directory.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const AssetServer = require('../AssetServer');
|
||||
const Bundler = require('../Bundler');
|
||||
const MultipartResponse = require('./MultipartResponse');
|
||||
|
||||
const defaults = require('../defaults');
|
||||
const emptyFunction = require('fbjs/lib/emptyFunction');
|
||||
const mime = require('mime-types');
|
||||
const parsePlatformFilePath = require('../node-haste/lib/parsePlatformFilePath');
|
||||
const path = require('path');
|
||||
const symbolicate = require('./symbolicate');
|
||||
const url = require('url');
|
||||
|
||||
const debug = require('debug')('RNP:Server');
|
||||
|
||||
import type Module, {HasteImpl} from '../node-haste/Module';
|
||||
import type {IncomingMessage, ServerResponse} from 'http';
|
||||
import type ResolutionResponse from '../node-haste/DependencyGraph/ResolutionResponse';
|
||||
import type Bundle from '../Bundler/Bundle';
|
||||
import type HMRBundle from '../Bundler/HMRBundle';
|
||||
import type {Reporter} from '../lib/reporting';
|
||||
import type {GetTransformOptions, PostProcessModules, PostMinifyProcess} from '../Bundler';
|
||||
import type {TransformCache} from '../lib/TransformCaching';
|
||||
import type {GlobalTransformCache} from '../lib/GlobalTransformCache';
|
||||
import type {SourceMap, Symbolicate} from './symbolicate';
|
||||
|
||||
const {
|
||||
createActionStartEntry,
|
||||
createActionEndEntry,
|
||||
log,
|
||||
} = require('../Logger');
|
||||
|
||||
function debounceAndBatch(fn, delay) {
|
||||
let args = [];
|
||||
let timeout;
|
||||
return value => {
|
||||
args.push(value);
|
||||
clearTimeout(timeout);
|
||||
timeout = setTimeout(() => {
|
||||
const a = args;
|
||||
args = [];
|
||||
fn(a);
|
||||
}, delay);
|
||||
};
|
||||
}
|
||||
|
||||
type Options = {
|
||||
assetExts?: Array<string>,
|
||||
blacklistRE?: RegExp,
|
||||
cacheVersion?: string,
|
||||
extraNodeModules?: {},
|
||||
getTransformOptions?: GetTransformOptions,
|
||||
globalTransformCache: ?GlobalTransformCache,
|
||||
hasteImpl?: HasteImpl,
|
||||
moduleFormat?: string,
|
||||
platforms?: Array<string>,
|
||||
polyfillModuleNames?: Array<string>,
|
||||
postProcessModules?: PostProcessModules,
|
||||
postMinifyProcess: PostMinifyProcess,
|
||||
projectRoots: $ReadOnlyArray<string>,
|
||||
providesModuleNodeModules?: Array<string>,
|
||||
reporter: Reporter,
|
||||
resetCache?: boolean,
|
||||
silent?: boolean,
|
||||
+sourceExts: ?Array<string>,
|
||||
+transformCache: TransformCache,
|
||||
+transformModulePath: string,
|
||||
transformTimeoutInterval?: number,
|
||||
watch?: boolean,
|
||||
workerPath: ?string,
|
||||
};
|
||||
|
||||
export type BundleOptions = {
|
||||
+assetPlugins: Array<string>,
|
||||
dev: boolean,
|
||||
entryFile: string,
|
||||
+entryModuleOnly: boolean,
|
||||
+generateSourceMaps: boolean,
|
||||
+hot: boolean,
|
||||
+inlineSourceMap: boolean,
|
||||
+isolateModuleIDs: boolean,
|
||||
minify: boolean,
|
||||
onProgress: ?(doneCont: number, totalCount: number) => mixed,
|
||||
+platform: ?string,
|
||||
+resolutionResponse: ?{},
|
||||
+runBeforeMainModule: Array<string>,
|
||||
+runModule: boolean,
|
||||
sourceMapUrl: ?string,
|
||||
unbundle: boolean,
|
||||
};
|
||||
|
||||
type DependencyOptions = {|
|
||||
+dev: boolean,
|
||||
+entryFile: string,
|
||||
+hot: boolean,
|
||||
+minify: boolean,
|
||||
+platform: ?string,
|
||||
+recursive: boolean,
|
||||
|};
|
||||
|
||||
const bundleDeps = new WeakMap();
|
||||
const NODE_MODULES = `${path.sep}node_modules${path.sep}`;
|
||||
|
||||
class Server {
|
||||
|
||||
_opts: {
|
||||
assetExts: Array<string>,
|
||||
blacklistRE: void | RegExp,
|
||||
cacheVersion: string,
|
||||
extraNodeModules: {},
|
||||
getTransformOptions?: GetTransformOptions,
|
||||
hasteImpl?: HasteImpl,
|
||||
moduleFormat: string,
|
||||
platforms: Array<string>,
|
||||
polyfillModuleNames: Array<string>,
|
||||
postProcessModules?: PostProcessModules,
|
||||
postMinifyProcess: PostMinifyProcess,
|
||||
projectRoots: $ReadOnlyArray<string>,
|
||||
providesModuleNodeModules?: Array<string>,
|
||||
reporter: Reporter,
|
||||
resetCache: boolean,
|
||||
silent: boolean,
|
||||
+sourceExts: Array<string>,
|
||||
+transformCache: TransformCache,
|
||||
+transformModulePath: string,
|
||||
transformTimeoutInterval: ?number,
|
||||
watch: boolean,
|
||||
workerPath: ?string,
|
||||
};
|
||||
_projectRoots: $ReadOnlyArray<string>;
|
||||
_bundles: {};
|
||||
_changeWatchers: Array<{
|
||||
req: IncomingMessage,
|
||||
res: ServerResponse,
|
||||
}>;
|
||||
_fileChangeListeners: Array<(filePath: string) => mixed>;
|
||||
_assetServer: AssetServer;
|
||||
_bundler: Bundler;
|
||||
_debouncedFileChangeHandler: (filePath: string) => mixed;
|
||||
_hmrFileChangeListener: ?(type: string, filePath: string) => mixed;
|
||||
_reporter: Reporter;
|
||||
_symbolicateInWorker: Symbolicate;
|
||||
_platforms: Set<string>;
|
||||
_nextBundleBuildID: number;
|
||||
|
||||
constructor(options: Options) {
|
||||
this._opts = {
|
||||
assetExts: options.assetExts || defaults.assetExts,
|
||||
blacklistRE: options.blacklistRE,
|
||||
cacheVersion: options.cacheVersion || '1.0',
|
||||
extraNodeModules: options.extraNodeModules || {},
|
||||
getTransformOptions: options.getTransformOptions,
|
||||
globalTransformCache: options.globalTransformCache,
|
||||
hasteImpl: options.hasteImpl,
|
||||
moduleFormat: options.moduleFormat != null ? options.moduleFormat : 'haste',
|
||||
platforms: options.platforms || defaults.platforms,
|
||||
polyfillModuleNames: options.polyfillModuleNames || [],
|
||||
postProcessModules: options.postProcessModules,
|
||||
postMinifyProcess: options.postMinifyProcess,
|
||||
projectRoots: options.projectRoots,
|
||||
providesModuleNodeModules: options.providesModuleNodeModules,
|
||||
reporter: options.reporter,
|
||||
resetCache: options.resetCache || false,
|
||||
silent: options.silent || false,
|
||||
sourceExts: options.sourceExts || defaults.sourceExts,
|
||||
transformCache: options.transformCache,
|
||||
transformModulePath: options.transformModulePath,
|
||||
transformTimeoutInterval: options.transformTimeoutInterval,
|
||||
watch: options.watch || false,
|
||||
workerPath: options.workerPath,
|
||||
};
|
||||
|
||||
const processFileChange =
|
||||
({type, filePath}) => this.onFileChange(type, filePath);
|
||||
|
||||
this._reporter = options.reporter;
|
||||
this._projectRoots = this._opts.projectRoots;
|
||||
this._bundles = Object.create(null);
|
||||
this._changeWatchers = [];
|
||||
this._fileChangeListeners = [];
|
||||
this._platforms = new Set(this._opts.platforms);
|
||||
|
||||
this._assetServer = new AssetServer({
|
||||
assetExts: this._opts.assetExts,
|
||||
projectRoots: this._opts.projectRoots,
|
||||
});
|
||||
|
||||
const bundlerOpts = Object.create(this._opts);
|
||||
bundlerOpts.assetServer = this._assetServer;
|
||||
bundlerOpts.allowBundleUpdates = this._opts.watch;
|
||||
bundlerOpts.globalTransformCache = options.globalTransformCache;
|
||||
bundlerOpts.watch = this._opts.watch;
|
||||
bundlerOpts.reporter = options.reporter;
|
||||
this._bundler = new Bundler(bundlerOpts);
|
||||
|
||||
// changes to the haste map can affect resolution of files in the bundle
|
||||
this._bundler.getResolver().then(resolver => {
|
||||
resolver.getDependencyGraph().getWatcher().on(
|
||||
'change',
|
||||
({eventsQueue}) => eventsQueue.forEach(processFileChange),
|
||||
);
|
||||
});
|
||||
|
||||
this._debouncedFileChangeHandler = debounceAndBatch(filePaths => {
|
||||
// only clear bundles for non-JS changes
|
||||
if (filePaths.every(RegExp.prototype.test, /\.js(?:on)?$/i)) {
|
||||
for (const key in this._bundles) {
|
||||
this._bundles[key].then(bundle => {
|
||||
const deps = bundleDeps.get(bundle);
|
||||
filePaths.forEach(filePath => {
|
||||
// $FlowFixMe(>=0.37.0)
|
||||
if (deps.files.has(filePath)) {
|
||||
// $FlowFixMe(>=0.37.0)
|
||||
deps.outdated.add(filePath);
|
||||
}
|
||||
});
|
||||
}).catch(e => {
|
||||
debug(`Could not update bundle: ${e}, evicting from cache`);
|
||||
delete this._bundles[key];
|
||||
});
|
||||
}
|
||||
} else {
|
||||
debug('Clearing bundles due to non-JS change');
|
||||
this._clearBundles();
|
||||
}
|
||||
this._informChangeWatchers();
|
||||
}, 50);
|
||||
|
||||
this._symbolicateInWorker = symbolicate.createWorker();
|
||||
this._nextBundleBuildID = 1;
|
||||
}
|
||||
|
||||
end(): mixed {
|
||||
return this._bundler.end();
|
||||
}
|
||||
|
||||
setHMRFileChangeListener(listener: ?(type: string, filePath: string) => mixed) {
|
||||
this._hmrFileChangeListener = listener;
|
||||
}
|
||||
|
||||
addFileChangeListener(listener: (filePath: string) => mixed) {
|
||||
if (this._fileChangeListeners.indexOf(listener) === -1) {
|
||||
this._fileChangeListeners.push(listener);
|
||||
}
|
||||
}
|
||||
|
||||
async buildBundle(options: BundleOptions): Promise<Bundle> {
|
||||
const bundle = await this._bundler.bundle(options);
|
||||
const modules = bundle.getModules();
|
||||
const nonVirtual = modules.filter(m => !m.virtual);
|
||||
bundleDeps.set(bundle, {
|
||||
files: new Map(nonVirtual.map(({sourcePath, meta}) =>
|
||||
[sourcePath, meta != null ? meta.dependencies : []],
|
||||
)),
|
||||
idToIndex: new Map(modules.map(({id}, i) => [id, i])),
|
||||
dependencyPairs: new Map(
|
||||
nonVirtual
|
||||
.filter(({meta}) => meta && meta.dependencyPairs)
|
||||
/* $FlowFixMe: the filter above ensures `dependencyPairs` is not null. */
|
||||
.map(m => [m.sourcePath, m.meta.dependencyPairs])
|
||||
),
|
||||
outdated: new Set(),
|
||||
});
|
||||
return bundle;
|
||||
}
|
||||
|
||||
buildBundleFromUrl(reqUrl: string): Promise<Bundle> {
|
||||
const options = this._getOptionsFromUrl(reqUrl);
|
||||
return this.buildBundle(options);
|
||||
}
|
||||
|
||||
buildBundleForHMR(
|
||||
options: {platform: ?string},
|
||||
host: string,
|
||||
port: number,
|
||||
): Promise<HMRBundle> {
|
||||
return this._bundler.hmrBundle(options, host, port);
|
||||
}
|
||||
|
||||
getShallowDependencies(options: DependencyOptions): Promise<Array<Module>> {
|
||||
return Promise.resolve().then(() => {
|
||||
const platform = options.platform != null
|
||||
? options.platform : parsePlatformFilePath(options.entryFile, this._platforms).platform;
|
||||
const {entryFile, dev, minify, hot} = options;
|
||||
return this._bundler.getShallowDependencies(
|
||||
{entryFile, platform, dev, minify, hot, generateSourceMaps: false},
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
getModuleForPath(entryFile: string): Promise<Module> {
|
||||
return this._bundler.getModuleForPath(entryFile);
|
||||
}
|
||||
|
||||
getDependencies(options: DependencyOptions): Promise<ResolutionResponse<Module, *>> {
|
||||
return Promise.resolve().then(() => {
|
||||
const platform = options.platform != null
|
||||
? options.platform : parsePlatformFilePath(options.entryFile, this._platforms).platform;
|
||||
const {entryFile, dev, minify, hot} = options;
|
||||
return this._bundler.getDependencies(
|
||||
{entryFile, platform, dev, minify, hot, generateSourceMaps: false},
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
getOrderedDependencyPaths(options: {
|
||||
+entryFile: string,
|
||||
+dev: boolean,
|
||||
+platform: string,
|
||||
+minify: boolean,
|
||||
+generateSourceMaps: boolean,
|
||||
}): Promise<mixed> {
|
||||
return Promise.resolve().then(() => {
|
||||
return this._bundler.getOrderedDependencyPaths(options);
|
||||
});
|
||||
}
|
||||
|
||||
onFileChange(type: string, filePath: string) {
|
||||
this._assetServer.onFileChange(type, filePath);
|
||||
|
||||
// If Hot Loading is enabled avoid rebuilding bundles and sending live
|
||||
// updates. Instead, send the HMR updates right away and clear the bundles
|
||||
// cache so that if the user reloads we send them a fresh bundle
|
||||
const {_hmrFileChangeListener} = this;
|
||||
if (_hmrFileChangeListener) {
|
||||
// Clear cached bundles in case user reloads
|
||||
this._clearBundles();
|
||||
_hmrFileChangeListener(type, filePath);
|
||||
return;
|
||||
} else if (type !== 'change' && filePath.indexOf(NODE_MODULES) !== -1) {
|
||||
// node module resolution can be affected by added or removed files
|
||||
debug('Clearing bundles due to potential node_modules resolution change');
|
||||
this._clearBundles();
|
||||
}
|
||||
|
||||
Promise.all(
|
||||
this._fileChangeListeners.map(listener => listener(filePath))
|
||||
).then(
|
||||
() => this._onFileChangeComplete(filePath),
|
||||
() => this._onFileChangeComplete(filePath)
|
||||
);
|
||||
}
|
||||
|
||||
_onFileChangeComplete(filePath: string) {
|
||||
// Make sure the file watcher event runs through the system before
|
||||
// we rebuild the bundles.
|
||||
this._debouncedFileChangeHandler(filePath);
|
||||
}
|
||||
|
||||
_clearBundles() {
|
||||
this._bundles = Object.create(null);
|
||||
}
|
||||
|
||||
_informChangeWatchers() {
|
||||
const watchers = this._changeWatchers;
|
||||
const headers = {
|
||||
'Content-Type': 'application/json; charset=UTF-8',
|
||||
};
|
||||
|
||||
watchers.forEach(function(w) {
|
||||
w.res.writeHead(205, headers);
|
||||
w.res.end(JSON.stringify({changed: true}));
|
||||
});
|
||||
|
||||
this._changeWatchers = [];
|
||||
}
|
||||
|
||||
_processDebugRequest(reqUrl: string, res: ServerResponse) {
|
||||
let ret = '<!doctype html>';
|
||||
const pathname = url.parse(reqUrl).pathname;
|
||||
/* $FlowFixMe: pathname would be null for an invalid URL */
|
||||
const parts = pathname.split('/').filter(Boolean);
|
||||
if (parts.length === 1) {
|
||||
ret += '<div><a href="/debug/bundles">Cached Bundles</a></div>';
|
||||
res.end(ret);
|
||||
} else if (parts[1] === 'bundles') {
|
||||
ret += '<h1> Cached Bundles </h1>';
|
||||
Promise.all(Object.keys(this._bundles).map(optionsJson =>
|
||||
this._bundles[optionsJson].then(p => {
|
||||
ret += '<div><h2>' + optionsJson + '</h2>';
|
||||
ret += p.getDebugInfo();
|
||||
})
|
||||
)).then(
|
||||
() => res.end(ret),
|
||||
e => {
|
||||
res.writeHead(500);
|
||||
res.end('Internal Error');
|
||||
// FIXME: $FlowFixMe: that's a hack, doesn't work with JSON-mode output
|
||||
this._reporter.terminal && this._reporter.terminal.log(e.stack);
|
||||
}
|
||||
);
|
||||
} else {
|
||||
res.writeHead(404);
|
||||
res.end('Invalid debug request');
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
_processOnChangeRequest(req: IncomingMessage, res: ServerResponse) {
|
||||
const watchers = this._changeWatchers;
|
||||
|
||||
watchers.push({
|
||||
req,
|
||||
res,
|
||||
});
|
||||
|
||||
req.on('close', () => {
|
||||
for (let i = 0; i < watchers.length; i++) {
|
||||
if (watchers[i] && watchers[i].req === req) {
|
||||
watchers.splice(i, 1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
_rangeRequestMiddleware(
|
||||
req: IncomingMessage,
|
||||
res: ServerResponse,
|
||||
data: string | Buffer,
|
||||
assetPath: string,
|
||||
) {
|
||||
if (req.headers && req.headers.range) {
|
||||
const [rangeStart, rangeEnd] = req.headers.range.replace(/bytes=/, '').split('-');
|
||||
const dataStart = parseInt(rangeStart, 10);
|
||||
const dataEnd = rangeEnd ? parseInt(rangeEnd, 10) : data.length - 1;
|
||||
const chunksize = (dataEnd - dataStart) + 1;
|
||||
|
||||
res.writeHead(206, {
|
||||
'Accept-Ranges': 'bytes',
|
||||
'Content-Length': chunksize.toString(),
|
||||
'Content-Range': `bytes ${dataStart}-${dataEnd}/${data.length}`,
|
||||
'Content-Type': mime.lookup(path.basename(assetPath[1])),
|
||||
});
|
||||
|
||||
return data.slice(dataStart, dataEnd + 1);
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
_processAssetsRequest(req: IncomingMessage, res: ServerResponse) {
|
||||
const urlObj = url.parse(decodeURI(req.url), true);
|
||||
/* $FlowFixMe: could be empty if the url is invalid */
|
||||
const assetPath: string = urlObj.pathname.match(/^\/assets\/(.+)$/);
|
||||
|
||||
const processingAssetRequestLogEntry =
|
||||
log(createActionStartEntry({
|
||||
action_name: 'Processing asset request',
|
||||
asset: assetPath[1],
|
||||
}));
|
||||
|
||||
/* $FlowFixMe: query may be empty for invalid URLs */
|
||||
this._assetServer.get(assetPath[1], urlObj.query.platform)
|
||||
.then(
|
||||
data => {
|
||||
// Tell clients to cache this for 1 year.
|
||||
// This is safe as the asset url contains a hash of the asset.
|
||||
if (process.env.REACT_NATIVE_ENABLE_ASSET_CACHING === true) {
|
||||
res.setHeader('Cache-Control', 'max-age=31536000');
|
||||
}
|
||||
res.end(this._rangeRequestMiddleware(req, res, data, assetPath));
|
||||
process.nextTick(() => {
|
||||
log(createActionEndEntry(processingAssetRequestLogEntry));
|
||||
});
|
||||
},
|
||||
error => {
|
||||
console.error(error.stack);
|
||||
res.writeHead(404);
|
||||
res.end('Asset not found');
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
optionsHash(options: {}) {
|
||||
// onProgress is a function, can't be serialized
|
||||
return JSON.stringify(Object.assign({}, options, {onProgress: null}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure we properly report the promise of a build that's happening,
|
||||
* including failed builds. We use that separately for when we update a bundle
|
||||
* and for when we build for scratch.
|
||||
*/
|
||||
_reportBundlePromise(
|
||||
buildID: string,
|
||||
options: {entryFile: string},
|
||||
bundlePromise: Promise<Bundle>,
|
||||
): Promise<Bundle> {
|
||||
this._reporter.update({
|
||||
buildID,
|
||||
entryFilePath: options.entryFile,
|
||||
type: 'bundle_build_started',
|
||||
});
|
||||
return bundlePromise.then(bundle => {
|
||||
this._reporter.update({
|
||||
buildID,
|
||||
type: 'bundle_build_done',
|
||||
});
|
||||
return bundle;
|
||||
}, error => {
|
||||
this._reporter.update({
|
||||
buildID,
|
||||
type: 'bundle_build_failed',
|
||||
});
|
||||
return Promise.reject(error);
|
||||
});
|
||||
}
|
||||
|
||||
useCachedOrUpdateOrCreateBundle(
|
||||
buildID: string,
|
||||
options: BundleOptions,
|
||||
): Promise<Bundle> {
|
||||
const optionsJson = this.optionsHash(options);
|
||||
const bundleFromScratch = () => {
|
||||
const building = this.buildBundle(options);
|
||||
this._bundles[optionsJson] = building;
|
||||
return building;
|
||||
};
|
||||
|
||||
if (optionsJson in this._bundles) {
|
||||
return this._bundles[optionsJson].then(bundle => {
|
||||
const deps = bundleDeps.get(bundle);
|
||||
// $FlowFixMe(>=0.37.0)
|
||||
const {dependencyPairs, files, idToIndex, outdated} = deps;
|
||||
if (outdated.size) {
|
||||
|
||||
const updatingExistingBundleLogEntry =
|
||||
log(createActionStartEntry({
|
||||
action_name: 'Updating existing bundle',
|
||||
outdated_modules: outdated.size,
|
||||
}));
|
||||
|
||||
debug('Attempt to update existing bundle');
|
||||
|
||||
// $FlowFixMe(>=0.37.0)
|
||||
deps.outdated = new Set();
|
||||
|
||||
const {platform, dev, minify, hot} = options;
|
||||
|
||||
// Need to create a resolution response to pass to the bundler
|
||||
// to process requires after transform. By providing a
|
||||
// specific response we can compute a non recursive one which
|
||||
// is the least we need and improve performance.
|
||||
const bundlePromise = this._bundles[optionsJson] =
|
||||
Promise.all([
|
||||
this.getDependencies({
|
||||
platform, dev, hot, minify,
|
||||
entryFile: options.entryFile,
|
||||
recursive: false,
|
||||
}),
|
||||
Promise.all(Array.from(outdated, this.getModuleForPath, this)),
|
||||
]).then(([response, changedModules]) => {
|
||||
debug('Update bundle: rebuild shallow bundle');
|
||||
|
||||
changedModules.forEach(m => {
|
||||
response.setResolvedDependencyPairs(
|
||||
m,
|
||||
/* $FlowFixMe: should be enforced not to be null. */
|
||||
dependencyPairs.get(m.path),
|
||||
{ignoreFinalized: true},
|
||||
);
|
||||
});
|
||||
|
||||
return this.buildBundle({
|
||||
...options,
|
||||
resolutionResponse: response.copy({
|
||||
dependencies: changedModules,
|
||||
}),
|
||||
}).then(updateBundle => {
|
||||
const oldModules = bundle.getModules();
|
||||
const newModules = updateBundle.getModules();
|
||||
for (let i = 0, n = newModules.length; i < n; i++) {
|
||||
const moduleTransport = newModules[i];
|
||||
const {meta, sourcePath} = moduleTransport;
|
||||
if (outdated.has(sourcePath)) {
|
||||
/* $FlowFixMe: `meta` could be empty */
|
||||
if (!contentsEqual(meta.dependencies, new Set(files.get(sourcePath)))) {
|
||||
// bail out if any dependencies changed
|
||||
return Promise.reject(Error(
|
||||
`Dependencies of ${sourcePath} changed from [${
|
||||
/* $FlowFixMe: `get` can return empty */
|
||||
files.get(sourcePath).join(', ')
|
||||
}] to [${
|
||||
/* $FlowFixMe: `meta` could be empty */
|
||||
meta.dependencies.join(', ')
|
||||
}]`
|
||||
));
|
||||
}
|
||||
|
||||
oldModules[idToIndex.get(moduleTransport.id)] = moduleTransport;
|
||||
}
|
||||
}
|
||||
|
||||
bundle.invalidateSource();
|
||||
|
||||
log(createActionEndEntry(updatingExistingBundleLogEntry));
|
||||
|
||||
debug('Successfully updated existing bundle');
|
||||
return bundle;
|
||||
});
|
||||
}).catch(e => {
|
||||
debug('Failed to update existing bundle, rebuilding...', e.stack || e.message);
|
||||
return bundleFromScratch();
|
||||
});
|
||||
return this._reportBundlePromise(buildID, options, bundlePromise);
|
||||
} else {
|
||||
debug('Using cached bundle');
|
||||
return bundle;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return this._reportBundlePromise(buildID, options, bundleFromScratch());
|
||||
}
|
||||
|
||||
processRequest(
|
||||
req: IncomingMessage,
|
||||
res: ServerResponse,
|
||||
next: () => mixed,
|
||||
) {
|
||||
const urlObj = url.parse(req.url, true);
|
||||
const {host} = req.headers;
|
||||
debug(`Handling request: ${host ? 'http://' + host : ''}${req.url}`);
|
||||
/* $FlowFixMe: Could be empty if the URL is invalid. */
|
||||
const pathname: string = urlObj.pathname;
|
||||
|
||||
let requestType;
|
||||
if (pathname.match(/\.bundle$/)) {
|
||||
requestType = 'bundle';
|
||||
} else if (pathname.match(/\.map$/)) {
|
||||
requestType = 'map';
|
||||
} else if (pathname.match(/\.assets$/)) {
|
||||
requestType = 'assets';
|
||||
} else if (pathname.match(/^\/debug/)) {
|
||||
this._processDebugRequest(req.url, res);
|
||||
return;
|
||||
} else if (pathname.match(/^\/onchange\/?$/)) {
|
||||
this._processOnChangeRequest(req, res);
|
||||
return;
|
||||
} else if (pathname.match(/^\/assets\//)) {
|
||||
this._processAssetsRequest(req, res);
|
||||
return;
|
||||
} else if (pathname === '/symbolicate') {
|
||||
this._symbolicate(req, res);
|
||||
return;
|
||||
} else {
|
||||
next();
|
||||
return;
|
||||
}
|
||||
|
||||
const options = this._getOptionsFromUrl(req.url);
|
||||
const requestingBundleLogEntry =
|
||||
log(createActionStartEntry({
|
||||
action_name: 'Requesting bundle',
|
||||
bundle_url: req.url,
|
||||
entry_point: options.entryFile,
|
||||
}));
|
||||
|
||||
const buildID = this.getNewBuildID();
|
||||
let reportProgress = emptyFunction;
|
||||
if (!this._opts.silent) {
|
||||
reportProgress = (transformedFileCount, totalFileCount) => {
|
||||
this._reporter.update({
|
||||
buildID,
|
||||
type: 'bundle_transform_progressed',
|
||||
transformedFileCount,
|
||||
totalFileCount,
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
const mres = MultipartResponse.wrap(req, res);
|
||||
options.onProgress = (done, total) => {
|
||||
reportProgress(done, total);
|
||||
mres.writeChunk({'Content-Type': 'application/json'}, JSON.stringify({done, total}));
|
||||
};
|
||||
|
||||
debug('Getting bundle for request');
|
||||
const building = this.useCachedOrUpdateOrCreateBundle(buildID, options);
|
||||
building.then(
|
||||
p => {
|
||||
if (requestType === 'bundle') {
|
||||
debug('Generating source code');
|
||||
const bundleSource = p.getSource({
|
||||
inlineSourceMap: options.inlineSourceMap,
|
||||
minify: options.minify,
|
||||
dev: options.dev,
|
||||
});
|
||||
debug('Writing response headers');
|
||||
const etag = p.getEtag();
|
||||
mres.setHeader('Content-Type', 'application/javascript');
|
||||
mres.setHeader('ETag', etag);
|
||||
|
||||
if (req.headers['if-none-match'] === etag) {
|
||||
debug('Responding with 304');
|
||||
mres.writeHead(304);
|
||||
mres.end();
|
||||
} else {
|
||||
mres.end(bundleSource);
|
||||
}
|
||||
debug('Finished response');
|
||||
log(createActionEndEntry(requestingBundleLogEntry));
|
||||
} else if (requestType === 'map') {
|
||||
const sourceMap = p.getSourceMapString({
|
||||
minify: options.minify,
|
||||
dev: options.dev,
|
||||
});
|
||||
|
||||
mres.setHeader('Content-Type', 'application/json');
|
||||
mres.end(sourceMap);
|
||||
log(createActionEndEntry(requestingBundleLogEntry));
|
||||
} else if (requestType === 'assets') {
|
||||
const assetsList = JSON.stringify(p.getAssets());
|
||||
mres.setHeader('Content-Type', 'application/json');
|
||||
mres.end(assetsList);
|
||||
log(createActionEndEntry(requestingBundleLogEntry));
|
||||
}
|
||||
},
|
||||
error => this._handleError(mres, this.optionsHash(options), error)
|
||||
).catch(error => {
|
||||
process.nextTick(() => {
|
||||
throw error;
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
_symbolicate(req: IncomingMessage, res: ServerResponse) {
|
||||
const symbolicatingLogEntry =
|
||||
log(createActionStartEntry('Symbolicating'));
|
||||
|
||||
debug('Start symbolication');
|
||||
|
||||
/* $FlowFixMe: where is `rowBody` defined? Is it added by
|
||||
* the `connect` framework? */
|
||||
Promise.resolve(req.rawBody).then(body => {
|
||||
const stack = JSON.parse(body).stack;
|
||||
|
||||
// In case of multiple bundles / HMR, some stack frames can have
|
||||
// different URLs from others
|
||||
const urls = new Set();
|
||||
stack.forEach(frame => {
|
||||
const sourceUrl = frame.file;
|
||||
// Skip `/debuggerWorker.js` which drives remote debugging because it
|
||||
// does not need symbolication.
|
||||
// Skip anything except http(s), because there is no support for that yet
|
||||
if (!urls.has(sourceUrl) &&
|
||||
!sourceUrl.endsWith('/debuggerWorker.js') &&
|
||||
sourceUrl.startsWith('http')) {
|
||||
urls.add(sourceUrl);
|
||||
}
|
||||
});
|
||||
|
||||
const mapPromises =
|
||||
Array.from(urls.values()).map(this._sourceMapForURL, this);
|
||||
|
||||
debug('Getting source maps for symbolication');
|
||||
return Promise.all(mapPromises).then(maps => {
|
||||
debug('Sending stacks and maps to symbolication worker');
|
||||
const urlsToMaps = zip(urls.values(), maps);
|
||||
return this._symbolicateInWorker(stack, urlsToMaps);
|
||||
});
|
||||
}).then(
|
||||
stack => {
|
||||
debug('Symbolication done');
|
||||
res.end(JSON.stringify({stack}));
|
||||
process.nextTick(() => {
|
||||
log(createActionEndEntry(symbolicatingLogEntry));
|
||||
});
|
||||
},
|
||||
error => {
|
||||
console.error(error.stack || error);
|
||||
res.statusCode = 500;
|
||||
res.end(JSON.stringify({error: error.message}));
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
_sourceMapForURL(reqUrl: string): Promise<SourceMap> {
|
||||
const options = this._getOptionsFromUrl(reqUrl);
|
||||
// We're not properly reporting progress here. Reporting should be done
|
||||
// from within that function.
|
||||
const building = this.useCachedOrUpdateOrCreateBundle(
|
||||
this.getNewBuildID(),
|
||||
options,
|
||||
);
|
||||
return building.then(p => p.getSourceMap({
|
||||
minify: options.minify,
|
||||
dev: options.dev,
|
||||
}));
|
||||
}
|
||||
|
||||
_handleError(res: ServerResponse, bundleID: string, error: {
|
||||
status: number,
|
||||
type: string,
|
||||
description: string,
|
||||
filename: string,
|
||||
lineNumber: number,
|
||||
errors: Array<{description: string, filename: string, lineNumber: number}>,
|
||||
}) {
|
||||
res.writeHead(error.status || 500, {
|
||||
'Content-Type': 'application/json; charset=UTF-8',
|
||||
});
|
||||
|
||||
if (error instanceof Error && (
|
||||
error.type === 'TransformError' ||
|
||||
error.type === 'NotFoundError' ||
|
||||
error.type === 'UnableToResolveError'
|
||||
)) {
|
||||
error.errors = [{
|
||||
description: error.description,
|
||||
filename: error.filename,
|
||||
lineNumber: error.lineNumber,
|
||||
}];
|
||||
res.end(JSON.stringify(error));
|
||||
|
||||
if (error.type === 'NotFoundError') {
|
||||
delete this._bundles[bundleID];
|
||||
}
|
||||
this._reporter.update({error, type: 'bundling_error'});
|
||||
} else {
|
||||
console.error(error.stack || error);
|
||||
res.end(JSON.stringify({
|
||||
type: 'InternalError',
|
||||
message: 'react-packager has encountered an internal error, ' +
|
||||
'please check your terminal error output for more details',
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
_getOptionsFromUrl(reqUrl: string): BundleOptions {
|
||||
// `true` to parse the query param as an object.
|
||||
const urlObj = url.parse(reqUrl, true);
|
||||
|
||||
/* $FlowFixMe: `pathname` could be empty for an invalid URL */
|
||||
const pathname = decodeURIComponent(urlObj.pathname);
|
||||
|
||||
// Backwards compatibility. Options used to be as added as '.' to the
|
||||
// entry module name. We can safely remove these options.
|
||||
const entryFile = pathname.replace(/^\//, '').split('.').filter(part => {
|
||||
if (part === 'includeRequire' || part === 'runModule' ||
|
||||
part === 'bundle' || part === 'map' || part === 'assets') {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}).join('.') + '.js';
|
||||
|
||||
// try to get the platform from the url
|
||||
/* $FlowFixMe: `query` could be empty for an invalid URL */
|
||||
const platform = urlObj.query.platform ||
|
||||
parsePlatformFilePath(pathname, this._platforms).platform;
|
||||
|
||||
/* $FlowFixMe: `query` could be empty for an invalid URL */
|
||||
const assetPlugin = urlObj.query.assetPlugin;
|
||||
const assetPlugins = Array.isArray(assetPlugin) ?
|
||||
assetPlugin :
|
||||
(typeof assetPlugin === 'string') ? [assetPlugin] : [];
|
||||
|
||||
const dev = this._getBoolOptionFromQuery(urlObj.query, 'dev', true);
|
||||
const minify = this._getBoolOptionFromQuery(urlObj.query, 'minify', false);
|
||||
return {
|
||||
sourceMapUrl: url.format({
|
||||
hash: urlObj.hash,
|
||||
pathname: pathname.replace(/\.bundle$/, '.map'),
|
||||
query: urlObj.query,
|
||||
search: urlObj.search,
|
||||
}),
|
||||
entryFile,
|
||||
dev,
|
||||
minify,
|
||||
hot: this._getBoolOptionFromQuery(urlObj.query, 'hot', false),
|
||||
runBeforeMainModule: defaults.runBeforeMainModule,
|
||||
runModule: this._getBoolOptionFromQuery(urlObj.query, 'runModule', true),
|
||||
inlineSourceMap: this._getBoolOptionFromQuery(
|
||||
urlObj.query,
|
||||
'inlineSourceMap',
|
||||
false
|
||||
),
|
||||
isolateModuleIDs: false,
|
||||
platform,
|
||||
resolutionResponse: null,
|
||||
entryModuleOnly: this._getBoolOptionFromQuery(
|
||||
urlObj.query,
|
||||
'entryModuleOnly',
|
||||
false,
|
||||
),
|
||||
generateSourceMaps:
|
||||
minify || !dev || this._getBoolOptionFromQuery(urlObj.query, 'babelSourcemap', false),
|
||||
assetPlugins,
|
||||
onProgress: null,
|
||||
unbundle: false,
|
||||
};
|
||||
}
|
||||
|
||||
_getBoolOptionFromQuery(query: ?{}, opt: string, defaultVal: boolean): boolean {
|
||||
/* $FlowFixMe: `query` could be empty when it comes from an invalid URL */
|
||||
if (query[opt] == null) {
|
||||
return defaultVal;
|
||||
}
|
||||
|
||||
return query[opt] === 'true' || query[opt] === '1';
|
||||
}
|
||||
|
||||
getNewBuildID(): string {
|
||||
return (this._nextBundleBuildID++).toString(36);
|
||||
}
|
||||
|
||||
static DEFAULT_BUNDLE_OPTIONS;
|
||||
|
||||
}
|
||||
|
||||
Server.DEFAULT_BUNDLE_OPTIONS = {
|
||||
assetPlugins: [],
|
||||
dev: true,
|
||||
entryModuleOnly: false,
|
||||
generateSourceMaps: false,
|
||||
hot: false,
|
||||
inlineSourceMap: false,
|
||||
isolateModuleIDs: false,
|
||||
minify: false,
|
||||
onProgress: null,
|
||||
resolutionResponse: null,
|
||||
runBeforeMainModule: defaults.runBeforeMainModule,
|
||||
runModule: true,
|
||||
sourceMapUrl: null,
|
||||
unbundle: false,
|
||||
};
|
||||
|
||||
function contentsEqual<T>(array: Array<T>, set: Set<T>): boolean {
|
||||
return array.length === set.size && array.every(set.has, set);
|
||||
}
|
||||
|
||||
function* zip<X, Y>(xs: Iterable<X>, ys: Iterable<Y>): Iterable<[X, Y]> {
|
||||
//$FlowIssue #9324959
|
||||
const ysIter: Iterator<Y> = ys[Symbol.iterator]();
|
||||
for (const x of xs) {
|
||||
const y = ysIter.next();
|
||||
if (y.done) {
|
||||
return;
|
||||
}
|
||||
yield [x, y.value];
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Server;
@@ -1,106 +0,0 @@
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */
'use strict';

jest.disableAutomock();
jest.mock('child_process');
jest.mock('net');

const EventEmitter = require('events');
const {Readable} = require('stream');
const {createWorker} = require('../');

let childProcess, socketResponse, socket, worker;

beforeEach(() => {
  childProcess = Object.assign(new EventEmitter(), {send: jest.fn()});
  require('child_process').fork.mockReturnValueOnce(childProcess);
  setupCommunication();

  socketResponse = '{"error": "no fake socket response set"}';
  socket = Object.assign(new Readable(), {
    _read() {
      this.push(socketResponse);
      this.push(null);
    },
    end: jest.fn(),
    setEncoding: jest.fn(),
  });
  require('net').createConnection.mockImplementation(() => socket);

  worker = createWorker();
});

it('sends a socket path to the child process', () => {
  socketResponse = '{}';
  return worker([], fakeSourceMaps())
    .then(() => expect(childProcess.send).toBeCalledWith(expect.any(String)));
});

it('fails if the child process emits an error', () => {
  const error = new Error('Expected error');
  childProcess.send.mockImplementation(() =>
    childProcess.emit('error', error));

  expect.assertions(1);
  return worker([], fakeSourceMaps())
    .catch(e => expect(e).toBe(error));
});

it('fails if the socket connection emits an error', () => {
  const error = new Error('Expected error');
  socket._read = () => socket.emit('error', error);

  expect.assertions(1);
  return worker([], fakeSourceMaps())
    .catch(e => expect(e).toBe(error));
});

it('sends the passed in stack and maps over the socket', () => {
  socketResponse = '{}';
  const stack = ['the', 'stack'];
  return worker(stack, fakeSourceMaps())
    .then(() =>
      expect(socket.end).toBeCalledWith(JSON.stringify({
        maps: Array.from(fakeSourceMaps()),
        stack,
      })));
});

it('resolves to the `result` property of the message returned over the socket', () => {
  socketResponse = '{"result": {"the": "result"}}';
  return worker([], fakeSourceMaps())
    .then(response => expect(response).toEqual({the: 'result'}));
});

it('rejects with the `error` property of the message returned over the socket', () => {
  socketResponse = '{"error": "the error message"}';

  expect.assertions(1);
  return worker([], fakeSourceMaps())
    .catch(error => expect(error).toEqual(new Error('the error message')));
});

it('rejects if the socket response cannot be parsed as JSON', () => {
  socketResponse = '{';

  expect.assertions(1);
  return worker([], fakeSourceMaps())
    .catch(error => expect(error).toBeInstanceOf(SyntaxError));
});

function setupCommunication() {
  childProcess.send.mockImplementation(() =>
    process.nextTick(() => childProcess.emit('message')));
}

function* fakeSourceMaps() {
  yield [1, {}];
  yield [2, {}];
}
@@ -1,157 +0,0 @@
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */

'use strict';

jest.disableAutomock();

const {LazyPromise, LockingPromise} = require('../util');

describe('Lazy Promise', () => {
  let factory;
  const value = {};

  beforeEach(() => {
    factory = jest.fn();
    factory.mockReturnValue(Promise.resolve(value));
  });

  it('does not run the factory by default', () => {
    new LazyPromise(factory); // eslint-disable-line no-new
    expect(factory).not.toBeCalled();
  });

  it('calling `.then()` returns a promise', () => {
    expect(new LazyPromise(factory).then()).toBeInstanceOf(Promise);
  });

  it('does not invoke the factory twice', () => {
    const p = new LazyPromise(factory);
    p.then(x => x);
    p.then(x => x);
    expect(factory).toHaveBeenCalledTimes(1);
  });

  describe('value and error propagation', () => {
    it('resolves to the value provided by the factory', () => {
      expect.assertions(1);
      return new LazyPromise(factory)
        .then(v => expect(v).toBe(value));
    });

    it('passes through errors if not handled', () => {
      const error = new Error('Unhandled');
      factory.mockReturnValue(Promise.reject(error));

      expect.assertions(1);
      return new LazyPromise(factory)
        .then()
        .catch(e => expect(e).toBe(error));
    });

    it('uses rejection handlers passed to `then()`', () => {
      const error = new Error('Must be handled');
      factory.mockReturnValue(Promise.reject(error));

      expect.assertions(1);
      return new LazyPromise(factory)
        .then(() => {}, e => expect(e).toBe(error));
    });

    it('uses rejection handlers passed to `catch()`', () => {
      const error = new Error('Must be handled');
      factory.mockReturnValue(Promise.reject(error));

      expect.assertions(1);
      return new LazyPromise(factory)
        .catch(e => expect(e).toBe(error));
    });
  });
});

describe('Locking Promise', () => {
  it('resolves to the value of the passed-in promise', () => {
    const value = {};

    expect.assertions(1);
    return new LockingPromise(Promise.resolve(value))
      .then(v => expect(v).toBe(value));
  });

  it('passes through rejections', () => {
    const error = new Error('Rejection');

    expect.assertions(1);
    return new LockingPromise(Promise.reject(error))
      .then()
      .catch(e => expect(e).toBe(error));
  });

  it('uses rejection handlers passed to `then()`', () => {
    const error = new Error('Must be handled');

    expect.assertions(1);
    return new LockingPromise(Promise.reject(error))
      .then(x => x, e => expect(e).toBe(error));
  });

  it('uses rejection handlers passed to `catch()`', () => {
    const error = new Error('Must be handled');

    expect.assertions(1);
    return new LockingPromise(Promise.reject(error))
      .catch(e => expect(e).toBe(error));
  });

  describe('locking', () => {
    const value = Symbol;
    let locking;
    beforeEach(() => {
      locking = new LockingPromise(Promise.resolve(value));
    });

    it('only allows one handler to access the promise value', () => {
      const deferred = defer();
      const secondHandler = jest.fn();
      locking.then(() => deferred.promise);
      locking.then(secondHandler);
      return Promise.resolve() // wait for the next tick
        .then(() => expect(secondHandler).not.toBeCalled());
    });

    it('allows waiting handlers to access the value after the current handler resolves', () => {
      let counter = 0;

      const deferred = defer();
      const x = locking.then(v => {
        const result = [++counter, v];
        return deferred.promise.then(() => result);
      });
      const y = locking.then(v => [++counter, v]);
      const z = locking.then(v => [++counter, v]);

      deferred.resolve();

      return Promise.all([x, y, z])
        .then(([first, second, third]) => {
          expect(first).toEqual([1, value]);
          expect(second).toEqual([2, value]);
          expect(third).toEqual([3, value]);
        });
    });
  });
});

function defer() {
  let resolve;
  const promise = new Promise(res => { resolve = res; });
  return {promise, resolve};
}
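
The deleted test file above fully specifies the two helpers it exercises: a promise that defers its factory until first use, and one that hands its value to only one handler at a time. A minimal sketch consistent with those expectations (an illustration, not the removed metro-bundler code) could be:

// Hedged sketch: LazyPromise calls the factory only once, on first use.
class LazyPromise {
  constructor(factory) {
    this._factory = factory;
    this._promise = null;
  }
  then(onFulfilled, onRejected) {
    return this._get().then(onFulfilled, onRejected);
  }
  catch(onRejected) {
    return this._get().catch(onRejected);
  }
  _get() {
    if (this._promise == null) {
      this._promise = this._factory(); // runs exactly once
    }
    return this._promise;
  }
}

// Hedged sketch: LockingPromise hands the value to one handler at a time;
// the next handler only runs after the previous one has settled.
class LockingPromise {
  constructor(promise) {
    this._promise = promise;
    this._lock = Promise.resolve(); // settles when the previous handler is done
  }
  then(onFulfilled, onRejected) {
    const result = this._lock
      .then(() => this._promise) // every handler sees the original value
      .then(onFulfilled, onRejected);
    this._lock = result.then(() => undefined, () => undefined);
    return result;
  }
  catch(onRejected) {
    return this.then(undefined, onRejected);
  }
}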
@ -1,110 +0,0 @@
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @format
 */
'use strict';

jest.disableAutomock();

const SourceMapGenerator = require('../../../Bundler/source-map/Generator');
const {symbolicate} = require('../worker');

let connection;
beforeEach(() => {
  connection = {end: jest.fn()};
});

it('symbolicates stack frames', () => {
  const mappings = [
    {
      from: {file: 'bundle1.js', lineNumber: 1, column: 2},
      to: {file: 'apples.js', lineNumber: 12, column: 34},
    },
    {
      from: {file: 'bundle2.js', lineNumber: 3, column: 4},
      to: {file: 'bananas.js', lineNumber: 56, column: 78},
    },
    {
      from: {file: 'bundle1.js', lineNumber: 5, column: 6},
      to: {file: 'clementines.js', lineNumber: 90, column: 12},
    },
  ];

  const stack = mappings.map(m => m.to);
  const maps = Object.entries(
    groupBy(mappings, m => m.from.file),
  ).map(([file, ms]) => [file, sourceMap(file, ms)]);

  return symbolicate(connection, makeData(stack, maps)).then(() =>
    expect(connection.end).toBeCalledWith(
      JSON.stringify({result: mappings.map(m => m.to)}),
    ),
  );
});

it('ignores stack frames without corresponding map', () => {
  const frame = {
    file: 'arbitrary.js',
    lineNumber: 123,
    column: 456,
  };

  return symbolicate(
    connection,
    makeData([frame], [['other.js', emptyMap()]]),
  ).then(() =>
    expect(connection.end).toBeCalledWith(JSON.stringify({result: [frame]})),
  );
});

it('ignores `/debuggerWorker.js` stack frames', () => {
  const frame = {
    file: 'http://localhost:8081/debuggerWorker.js',
    lineNumber: 123,
    column: 456,
  };

  return symbolicate(connection, makeData([frame])).then(() =>
    expect(connection.end).toBeCalledWith(JSON.stringify({result: [frame]})),
  );
});

function makeData(stack, maps = []) {
  return JSON.stringify({maps, stack});
}

function sourceMap(file, mappings) {
  const g = new SourceMapGenerator();
  g.startFile(file, null);
  mappings.forEach(({from, to}) =>
    g.addSourceMapping(to.lineNumber, to.column, from.lineNumber, from.column),
  );
  return g.toMap();
}

function groupBy(xs, key) {
  const grouped = {};
  xs.forEach(x => {
    const k = key(x);
    if (k in grouped) {
      grouped[k].push(x);
    } else {
      grouped[k] = [x];
    }
  });
  return grouped;
}

function emptyMap() {
  return {
    version: 3,
    sources: [],
    mappings: '',
  };
}
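
The tests above describe the symbolication worker's observable behavior: parse a JSON payload of {maps, stack}, map each frame through the source map for its file, pass frames through when no map matches or when they come from the debugger worker, and reply with JSON over the connection. A hedged sketch of that behavior (not the deleted worker; it assumes the `source-map` package's synchronous SourceMapConsumer, and the function name is hypothetical):

const {SourceMapConsumer} = require('source-map');

function symbolicateSketch(connection, data) {
  return Promise.resolve().then(() => {
    const {maps, stack} = JSON.parse(data);
    // Build one consumer per bundle file named in the payload.
    const consumers = new Map(
      maps.map(([file, map]) => [file, new SourceMapConsumer(map)]),
    );
    const result = stack.map(frame => {
      const consumer = consumers.get(frame.file);
      // Frames without a matching map, and the debugger worker itself, pass through.
      if (consumer == null || frame.file.endsWith('/debuggerWorker.js')) {
        return frame;
      }
      const pos = consumer.originalPositionFor({
        line: frame.lineNumber,
        column: frame.column,
      });
      return {file: pos.source, lineNumber: pos.line, column: pos.column};
    });
    connection.end(JSON.stringify({result}));
  });
}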