Started refactoring admin scripts into TypeScript.
parent
27b3af0a7b
commit
f7e82fcb46
|
@@ -1,5 +0,0 @@
|
|||
"use strict";
|
||||
|
||||
const { setupBuild } = require("../build");
|
||||
|
||||
setupBuild(false);
|
|
@@ -1,37 +0,0 @@
|
|||
const { setupBuild } = require("../build");
|
||||
const { loadPackage, savePackage } = require("../local");
|
||||
|
||||
const arg = process.argv[2];
|
||||
|
||||
(async function() {
|
||||
process.argv.slice(2).forEach((arg) => {
|
||||
console.log("Setting Option:", arg);
|
||||
switch(arg) {
|
||||
case "esm":
|
||||
setupBuild(true);
|
||||
break;
|
||||
|
||||
case "cjs":
|
||||
setupBuild(false);
|
||||
break;
|
||||
|
||||
// This will remove the browser field entirely, so make sure
|
||||
// to set esm or cjs first as they will restore the browser
|
||||
// field
|
||||
case "browser-lang-all": {
|
||||
const info = loadPackage("wordlists");
|
||||
delete info.browser;
|
||||
savePackage("wordlists", info);
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
console.log("Unknown option:", arg);
|
||||
return 1;
|
||||
}
|
||||
});
|
||||
return 0;
|
||||
|
||||
})().then((result) => {
|
||||
process.exit(result);
|
||||
});
|
|
@@ -1,16 +0,0 @@
|
|||
"use stricT";
|
||||
|
||||
const depgraph = require("../depgraph");
|
||||
const { log } = require("../log");
|
||||
const { loadJson, resolve, saveJson } = require("../utils");
|
||||
|
||||
(async function() {
|
||||
log(`<bold:Updating dependency-graph build order (tsconfig.project.json)...>`);
|
||||
let ordered = depgraph.getOrdered(true);
|
||||
|
||||
let path = resolve("tsconfig.project.json")
|
||||
|
||||
let projectConfig = loadJson(path);
|
||||
projectConfig.references = ordered.map((name) => ({ path: ("./packages/" + name) }));
|
||||
saveJson(path, projectConfig);
|
||||
})();
|
|
@@ -1,30 +0,0 @@
|
|||
"use strict";
|
||||
|
||||
const fs = require("fs");
|
||||
const { resolve } = require("path");
|
||||
|
||||
const sourceEthers = fs.readFileSync(resolve(__dirname, "../../packages/ethers/src.ts/ethers.ts")).toString();
|
||||
const targets = sourceEthers.match(/export\s*{\s*((.|\s)*)}/)[1].trim();
|
||||
|
||||
const output = `"use strict";
|
||||
|
||||
// To modify this file, you must update ./admin/cmds/update-exports.js
|
||||
|
||||
import * as ethers from "./ethers";
|
||||
|
||||
try {
|
||||
const anyGlobal = (window as any);
|
||||
|
||||
if (anyGlobal._ethers == null) {
|
||||
anyGlobal._ethers = ethers;
|
||||
}
|
||||
} catch (error) { }
|
||||
|
||||
export { ethers };
|
||||
|
||||
export {
|
||||
${ targets }
|
||||
} from "./ethers";
|
||||
`;
|
||||
|
||||
fs.writeFileSync(resolve(__dirname, "../../packages/ethers/src.ts/index.ts"), output);
|
|
@@ -0,0 +1 @@
|
|||
export declare function setupBuild(buildModule: boolean): void;
|
|
@@ -0,0 +1,48 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const path_1 = require("./path");
|
||||
const utils_1 = require("./utils");
|
||||
function setupConfig(outDir, moduleType, targetType) {
|
||||
// Configure the tsconfig.package.json...
|
||||
const path = path_1.resolve("tsconfig.package.json");
|
||||
const content = utils_1.loadJson(path);
|
||||
content.compilerOptions.module = moduleType;
|
||||
content.compilerOptions.target = targetType;
|
||||
utils_1.saveJson(path, content, true);
|
||||
// Configure the browser field for every package, copying the
|
||||
// browser.umd field for UMD and browser.esm for ESM
|
||||
path_1.dirnames.forEach((dirname) => {
|
||||
const filename = path_1.getPackageJsonPath(dirname);
|
||||
const info = utils_1.loadJson(filename);
|
||||
if (info._ethers_nobuild) {
|
||||
return;
|
||||
}
|
||||
if (targetType === "es2015") {
|
||||
if (info["browser.esm"]) {
|
||||
info.browser = info["browser.esm"];
|
||||
}
|
||||
}
|
||||
else if (targetType === "es5") {
|
||||
if (info["browser.umd"]) {
|
||||
info.browser = info["browser.umd"];
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw new Error("unsupported target");
|
||||
}
|
||||
utils_1.saveJson(filename, info, true);
|
||||
let path = path_1.resolve("packages", dirname, "tsconfig.json");
|
||||
let content = utils_1.loadJson(path);
|
||||
content.compilerOptions.outDir = outDir;
|
||||
utils_1.saveJson(path, content, true);
|
||||
});
|
||||
}
|
||||
function setupBuild(buildModule) {
|
||||
if (buildModule) {
|
||||
setupConfig("./lib.esm/", "es2015", "es2015");
|
||||
}
|
||||
else {
|
||||
setupConfig("./lib/", "commonjs", "es5");
|
||||
}
|
||||
}
|
||||
exports.setupBuild = setupBuild;
|
|
@@ -0,0 +1 @@
|
|||
export declare function foo(): void;
|
|
@@ -0,0 +1,4 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
function foo() { }
|
||||
exports.foo = foo;
|
|
@@ -0,0 +1 @@
|
|||
export {};
|
|
@@ -0,0 +1,81 @@
|
|||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs_1 = __importDefault(require("fs"));
|
||||
const semver_1 = __importDefault(require("semver"));
|
||||
const path_1 = require("../path");
|
||||
const local = __importStar(require("../local"));
|
||||
const log_1 = require("../log");
|
||||
const npm = __importStar(require("../npm"));
|
||||
const utils_1 = require("../utils");
|
||||
(function () {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const progress = log_1.getProgressBar(log_1.colorify.bold("Bumping package.json versions"));
|
||||
const latestVersions = {};
|
||||
let updated = false;
|
||||
const output = [];
|
||||
// For each package, detect diff between tarball and remote
|
||||
for (let i = 0; i < path_1.dirnames.length; i++) {
|
||||
progress(i / path_1.dirnames.length);
|
||||
const dirname = path_1.dirnames[i];
|
||||
const pLocal = local.getPackage(dirname);
|
||||
const pNpm = yield npm.getPackage(dirname);
|
||||
const tarballHash = local.computeTarballHash(dirname);
|
||||
let version = pNpm.version;
|
||||
if (tarballHash !== pNpm.tarballHash) {
|
||||
version = semver_1.default.inc(version, "patch");
|
||||
output.push([
|
||||
" ",
|
||||
log_1.colorify.blue(pLocal.name),
|
||||
utils_1.repeat(" ", 47 - pLocal.name.length - pNpm.version.length),
|
||||
pNpm.version,
|
||||
log_1.colorify.bold(" => "),
|
||||
log_1.colorify.green(version)
|
||||
].join(""));
|
||||
local.updateJson(path_1.getPackageJsonPath(dirname), { tarballHash, version });
|
||||
updated = true;
|
||||
}
|
||||
latestVersions[pLocal.name] = version;
|
||||
// Write out the _version.ts
|
||||
if (!pLocal._ethers_nobuild) {
|
||||
const code = "export const version = " + JSON.stringify(dirname + "/" + pLocal.version) + ";\n";
|
||||
fs_1.default.writeFileSync(path_1.resolve(path_1.getPackagePath(dirname), "src.ts/_version.ts"), code);
|
||||
}
|
||||
}
|
||||
progress(1);
|
||||
if (updated) {
|
||||
const filename = path_1.resolve("packages/ethers/package.json");
|
||||
const info = utils_1.loadJson(filename);
|
||||
Object.keys(info.dependencies).forEach((name) => {
|
||||
const version = latestVersions[name];
|
||||
if (version == null) {
|
||||
return;
|
||||
}
|
||||
info.dependencies[name] = version;
|
||||
});
|
||||
utils_1.saveJson(filename, info);
|
||||
}
|
||||
output.forEach((line) => { console.log(line); });
|
||||
});
|
||||
})().catch((error) => {
|
||||
console.log(`Error running ${process.argv[0]}: ${error.message}`);
|
||||
process.exit(1);
|
||||
});
|
|
@@ -10,15 +10,15 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const path_1 = require("../path");
|
||||
const local_1 = require("../local");
|
||||
(function () {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const deps = path_1.getDependencies();
|
||||
const dependencies = Object.keys(deps).reduce((accum, name) => {
|
||||
if (!path_1.isEthers(name)) {
|
||||
accum[name] = deps[name];
|
||||
}
|
||||
return accum;
|
||||
}, {});
|
||||
path_1.updateJson(path_1.dirs.rootPackageJsonPath, { dependencies });
|
||||
const dependencies = local_1.getDependencies(null, (name) => {
|
||||
return !path_1.isEthers(name);
|
||||
});
|
||||
local_1.updateJson(path_1.dirs.rootPackageJsonPath, { dependencies });
|
||||
});
|
||||
})();
|
||||
})().catch((error) => {
|
||||
console.log(`Error running ${process.argv[0]}: ${error.message}`);
|
||||
process.exit(1);
|
||||
});
|
||||
|
|
|
@@ -15,6 +15,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|||
const fs_1 = __importDefault(require("fs"));
|
||||
const path_1 = require("path");
|
||||
const path_2 = require("../path");
|
||||
const local_1 = require("../local");
|
||||
function link(existing, path) {
|
||||
try {
|
||||
const current = fs_1.default.readlinkSync(path);
|
||||
|
@@ -48,10 +49,14 @@ function link(existing, path) {
|
|||
});
|
||||
path_2.packages.forEach((name) => {
|
||||
const nodeModules = path_1.resolve(nodeModulesBase, path_2.getDirname(name));
|
||||
const deps = path_2.getDependencies(name);
|
||||
const deps = local_1.getDependencies(name);
|
||||
Object.keys(deps).forEach((name) => {
|
||||
link(path_1.resolve(path_2.dirs.root, "node_modules", name), path_1.resolve(nodeModules, name));
|
||||
});
|
||||
});
|
||||
});
|
||||
})();
|
||||
})().catch((error) => {
|
||||
console.log(`Error running ${process.argv[0]}: ${error.message}`);
|
||||
process.exit(1);
|
||||
});
|
||||
;
|
||||
|
|
|
@@ -0,0 +1 @@
|
|||
declare const major: any;
|
|
@@ -1,15 +1,13 @@
|
|||
"use strict";
|
||||
|
||||
const { major } = require("semver");
|
||||
|
||||
// This should be used like `node npm-skip-node8 || COMMAND`.
|
||||
// - If node 8, this script returns true, skipping COMMAND
|
||||
// - Otherwise, return false, running COMMAND
|
||||
|
||||
if (major(process.version) > 8) {
|
||||
// Node >8; return "false" (wrt to shell scripting)
|
||||
process.exit(1);
|
||||
} else {
|
||||
}
|
||||
else {
|
||||
// Node 8; return "true" (wrt shell scripting)
|
||||
process.exit(0);
|
||||
}
|
|
@@ -0,0 +1,7 @@
|
|||
import { Server } from "http";
|
||||
export declare function getMime(filename: string): string;
|
||||
export declare type Options = {
|
||||
port?: number;
|
||||
redirects?: Record<string, string>;
|
||||
};
|
||||
export declare function start(root: string, options: Options): Server;
|
|
@@ -0,0 +1,96 @@
|
|||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs_1 = __importDefault(require("fs"));
|
||||
const http_1 = require("http");
|
||||
const path_1 = require("path");
|
||||
function getMime(filename) {
|
||||
switch (filename.split('.').pop().toLowerCase()) {
|
||||
case 'css': return 'text/css';
|
||||
case 'doctree': return 'application/x-doctree';
|
||||
case 'eot': return 'application/vnd.ms-fontobject';
|
||||
case 'gif': return 'image/gif';
|
||||
case 'html': return 'text/html';
|
||||
case 'js': return 'application/javascript';
|
||||
case 'jpg': return 'image/jpeg';
|
||||
case 'jpeg': return 'image/jpeg';
|
||||
case 'md': return 'text/markdown';
|
||||
case 'pickle': return 'application/x-pickle';
|
||||
case 'png': return 'image/png';
|
||||
case 'svg': return 'image/svg+xml';
|
||||
case 'ttf': return 'application/x-font-ttf';
|
||||
case 'txt': return 'text/plain';
|
||||
case 'woff': return 'application/font-woff';
|
||||
}
|
||||
console.log('NO MIME', filename);
|
||||
return "application/octet-stream";
|
||||
}
|
||||
exports.getMime = getMime;
|
||||
function start(root, options) {
|
||||
if (root == null) {
|
||||
throw new Error("root required");
|
||||
}
|
||||
if (options == null) {
|
||||
options = {};
|
||||
}
|
||||
if (options.port == null) {
|
||||
options.port = 8000;
|
||||
}
|
||||
root = path_1.resolve(root);
|
||||
const server = http_1.createServer((req, resp) => {
|
||||
// Follow redirects in options
|
||||
if (options.redirects && options.redirects[req.url]) {
|
||||
resp.writeHead(301, { Location: options.redirects[req.url] });
|
||||
resp.end();
|
||||
return;
|
||||
}
|
||||
let filename = path_1.resolve(root, "." + req.url);
|
||||
// Make sure we aren't crawling out of our sandbox
|
||||
if (req.url[0] !== "/" || filename.substring(0, filename.length) !== filename) {
|
||||
resp.writeHead(403);
|
||||
resp.end();
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const stat = fs_1.default.statSync(filename);
|
||||
if (stat.isDirectory()) {
|
||||
// Redirect bare directory to its path (i.e. "/foo" => "/foo/")
|
||||
if (req.url[req.url.length - 1] !== "/") {
|
||||
resp.writeHead(301, { Location: req.url + "/" });
|
||||
resp.end();
|
||||
return;
|
||||
}
|
||||
filename += "/index.html";
|
||||
}
|
||||
const content = fs_1.default.readFileSync(filename);
|
||||
resp.writeHead(200, {
|
||||
"Content-Length": content.length,
|
||||
"Content-Type": getMime(filename)
|
||||
});
|
||||
resp.end(content);
|
||||
return;
|
||||
}
|
||||
catch (error) {
|
||||
if (error.code === "ENOENT") {
|
||||
resp.writeHead(404, {});
|
||||
resp.end();
|
||||
return;
|
||||
}
|
||||
resp.writeHead(500, {});
|
||||
resp.end();
|
||||
return;
|
||||
}
|
||||
});
|
||||
server.listen(options.port, () => {
|
||||
console.log(`Server running on: http://localhost:${options.port}`);
|
||||
});
|
||||
return server;
|
||||
}
|
||||
exports.start = start;
|
||||
start(path_1.resolve(__dirname, "../../docs"), {
|
||||
redirects: {
|
||||
"/": "/v5/"
|
||||
}
|
||||
});
|
|
@@ -0,0 +1 @@
|
|||
export {};
|
|
@@ -0,0 +1,45 @@
|
|||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const build_1 = require("../build");
|
||||
const log_1 = require("../log");
|
||||
const path_1 = require("../path");
|
||||
const utils_1 = require("../utils");
|
||||
(function () {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
process.argv.slice(2).forEach((arg) => {
|
||||
console.log(log_1.colorify.bold("Setting Option:"), arg);
|
||||
switch (arg) {
|
||||
case "esm":
|
||||
build_1.setupBuild(true);
|
||||
break;
|
||||
case "cjs":
|
||||
build_1.setupBuild(false);
|
||||
break;
|
||||
// This will remove the browser field entirely, so make sure
|
||||
// to set esm or cjs first as they will restore the browser
|
||||
// field
|
||||
case "browser-lang-all": {
|
||||
const filename = path_1.getPackageJsonPath("wordlists");
|
||||
const info = utils_1.loadJson(filename);
|
||||
delete info.browser;
|
||||
utils_1.saveJson(filename, info, true);
|
||||
break;
|
||||
}
|
||||
default:
|
||||
throw new Error(`Unknown option: ${JSON.stringify(arg)}`);
|
||||
}
|
||||
});
|
||||
});
|
||||
})().catch((error) => {
|
||||
console.log(`Error running ${process.argv[0]}: ${error.message}`);
|
||||
process.exit(1);
|
||||
});
|
|
@@ -0,0 +1 @@
|
|||
export {};
|
|
@@ -0,0 +1,225 @@
|
|||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs_1 = __importDefault(require("fs"));
|
||||
const path_1 = require("path");
|
||||
const typescript_1 = __importDefault(require("typescript"));
|
||||
const Words = fs_1.default.readFileSync("/usr/share/dict/words").toString().split("\n").reduce((accum, word) => {
|
||||
accum[word.toLowerCase()] = true;
|
||||
return accum;
|
||||
}, {});
|
||||
`
|
||||
// Words missing from the dictionary
|
||||
accessing addresses aligned autofill called cancelled changed censored
|
||||
clamping compiled computed configured consumed creating decoded decoding
|
||||
decreased decrypt decrypted decrypting deployed deploying deprecated detected
|
||||
discontinued earliest email enabled encoded encoding encrypt
|
||||
encrypted encrypting entries euro exceeded existing expected
|
||||
expired failed fetches formatted formatting funding generated
|
||||
hardened has highly ignoring implemented implementer imported including instantiate
|
||||
joined keyword labelled larger lookup matches mined modified modifies multi
|
||||
named needed nested neutered numeric offline optimizer overriding owned packed
|
||||
padded parsed parsing passed placeholder processing properties prototyping reached
|
||||
recommended recovered redacted remaining replaced required
|
||||
serializes shared signed signing skipped stored supported tagging targetted
|
||||
throttled transactions uninstall unstake unsubscribe using verifies website
|
||||
|
||||
// Overly Specific Words
|
||||
bech BIP BIP39 BIP44 btc bzz crypto eip etc hashes hmac icap
|
||||
keccak ltc namehash ripemd RLP scrypt secp sha xdai
|
||||
|
||||
blockhash
|
||||
|
||||
bitcoin ethereum finney gwei kwei mwei satoshi szabo wei weth
|
||||
|
||||
crowdsale hexlify hd hdnode underpriced
|
||||
|
||||
boolean int struct tuple uint
|
||||
nonpayable
|
||||
jumpdest mstore shr shl xor
|
||||
|
||||
// Classes
|
||||
ABIEncoder testcase numberish Wordlist
|
||||
|
||||
// Common Code Strings
|
||||
abi addr api app arg arrayify asm basex bigint bignumber bn byte
|
||||
bytecode callback calldata checksum ciphertext cli codepoint commify config
|
||||
contenthash ctr ctrl debug dd dklen eexist encseed eof ethaddr
|
||||
ethseed ethers eval exec filename func gz hid http https hw iv
|
||||
info init ipc json kdf kdfparams labelhash lang lib mm multihash nfc
|
||||
nfkc nfd nfkd nodehash notok nowait nullish oob opcode pbkdf pc plugin
|
||||
pragma pre prf repl rpc sighash topichash solc stdin stdout subclasses
|
||||
subnode timeout todo txt ufixed utc utf util url uuid vm vs websocket
|
||||
wikipedia wx xe xpriv xpub xx yyyy zlib
|
||||
|
||||
// AbiV2
|
||||
abiv
|
||||
|
||||
// Query parameters
|
||||
apikey asc endblock startblock
|
||||
|
||||
alchemyapi Cloudflare Etherscan INFURA IPFS MetaMask Nodesmith
|
||||
Trezor ledgerhq axic bitcoinjs browserify easyseed ethereumjs
|
||||
goerli homestead kotti kovan mainnet morden mordor rinkeby
|
||||
ropsten testnet
|
||||
|
||||
// Demo words
|
||||
args foo eth foo foobar ll localhost passwd ricmoo tx xxx yna
|
||||
|
||||
// nameprep tags
|
||||
ALCat BiDi LCat nameprep
|
||||
|
||||
// Language Codes (and short binary data)
|
||||
cn cz en es fr it ja tw zh zh_cn zh_tw
|
||||
OYAa IJBEJqXZJ
|
||||
|
||||
`.split("\n").filter((l) => (l.substring(0, 2) != "/\/")).join("\n").split(/\s+/g).forEach((word) => {
|
||||
word = word.trim();
|
||||
if (word === "") {
|
||||
return;
|
||||
}
|
||||
Words[word.toLowerCase()] = true;
|
||||
});
|
||||
function getStrings(source) {
|
||||
const sourceFile = typescript_1.default.createSourceFile("filename.ts", source, typescript_1.default.ScriptTarget.Latest);
|
||||
const result = [];
|
||||
function add(value, pos) {
|
||||
const lineNo = sourceFile.getLineAndCharacterOfPosition(pos).line + 1;
|
||||
result.push({ value, lineNo });
|
||||
}
|
||||
//let lastClass = null, lastEnum = null;
|
||||
function visit(node, depth) {
|
||||
switch (node.kind) {
|
||||
//case ts.SyntaxKind.TemplateExpression:
|
||||
// if (node.head) { visit(node.head); }
|
||||
// console.dir(node, { depth: null });
|
||||
// break;
|
||||
case typescript_1.default.SyntaxKind.TemplateHead:
|
||||
case typescript_1.default.SyntaxKind.TemplateMiddle:
|
||||
case typescript_1.default.SyntaxKind.TemplateTail:
|
||||
case typescript_1.default.SyntaxKind.StringLiteral:
|
||||
case typescript_1.default.SyntaxKind.NoSubstitutionTemplateLiteral:
|
||||
add(node.text, node.pos);
|
||||
break;
|
||||
}
|
||||
typescript_1.default.forEachChild(node, (node) => { return visit(node, depth + 1); });
|
||||
}
|
||||
visit(sourceFile, 0);
|
||||
return result;
|
||||
}
|
||||
const Include = new RegExp("packages/.*/src.ts/.*\.ts$");
|
||||
const Exclude = new RegExp("/node_modules/|src.ts/.*browser.*");
|
||||
function getAllStrings(path) {
|
||||
const Root = path_1.resolve(__dirname, path);
|
||||
const readdir = function (path) {
|
||||
if (path.match(Exclude)) {
|
||||
return [];
|
||||
}
|
||||
const stat = fs_1.default.statSync(path);
|
||||
if (stat.isDirectory()) {
|
||||
return fs_1.default.readdirSync(path).reduce((result, filename) => {
|
||||
readdir(path_1.resolve(path, filename)).forEach((file) => {
|
||||
result.push(file);
|
||||
});
|
||||
return result;
|
||||
}, []);
|
||||
}
|
||||
if (path.match(Include)) {
|
||||
const source = fs_1.default.readFileSync(path).toString();
|
||||
return [{ filename: path.substring(Root.length), values: getStrings(source) }];
|
||||
}
|
||||
return [];
|
||||
};
|
||||
return readdir(Root);
|
||||
}
|
||||
function checkWord(word) {
|
||||
word = word.toLowerCase();
|
||||
// A word
|
||||
if (Words[word]) {
|
||||
return true;
|
||||
}
|
||||
// Simple Plural
|
||||
if (word.match(/.*s$/) && Words[word.substring(0, word.length - 1)]) {
|
||||
return true;
|
||||
}
|
||||
// Hex string
|
||||
if (word.match(/^(0x)?[0-9a-f]*$/i)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function starts(text, prefix) {
|
||||
return (text.substring(0, prefix.length) === prefix);
|
||||
}
|
||||
(function () {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let count = 0;
|
||||
getAllStrings(path_1.resolve(__dirname, "../../../../packages")).forEach((file) => {
|
||||
if (starts(file.filename, "/testcases/src.ts/generation-scripts")) {
|
||||
return;
|
||||
}
|
||||
if (starts(file.filename, "/asm/src.ts/opcodes.ts")) {
|
||||
return;
|
||||
}
|
||||
file.values.forEach((entry) => {
|
||||
function problem(word) {
|
||||
count++;
|
||||
console.log({
|
||||
filename: file.filename,
|
||||
word: JSON.stringify(word),
|
||||
sentence: JSON.stringify(entry.value.substring(0, 80)),
|
||||
line: entry.lineNo
|
||||
});
|
||||
}
|
||||
const value = entry.value.trim();
|
||||
// Empty space
|
||||
if (value === "") {
|
||||
return;
|
||||
}
|
||||
// Prolly a require
|
||||
if (value.match(/^@ethersproject\/[a-z0-9-]+$/)) {
|
||||
return;
|
||||
}
|
||||
if (value.substring(0, 2) === "./") {
|
||||
return;
|
||||
}
|
||||
// Prolly encoded binary data
|
||||
if (value.indexOf(" ") === -1 && value.length > 20) {
|
||||
return;
|
||||
}
|
||||
if (checkWord(value)) {
|
||||
return;
|
||||
}
|
||||
value.replace(/([a-z+])([A-Z])/g, (all, first, secondLetter) => {
|
||||
return first + " " + secondLetter;
|
||||
}).replace(/((?:0x)?[A-Za-z]+)/gi, (all, word) => {
|
||||
if (checkWord(word)) {
|
||||
return "";
|
||||
}
|
||||
problem(word);
|
||||
return "";
|
||||
});
|
||||
;
|
||||
});
|
||||
});
|
||||
if (count) {
|
||||
console.log(`Found ${count} typos.`);
|
||||
process.exit(1);
|
||||
}
|
||||
process.exit(0);
|
||||
});
|
||||
})().catch((error) => {
|
||||
console.log(error);
|
||||
process.exit(1);
|
||||
});
|
|
@@ -0,0 +1 @@
|
|||
export {};
|
|
@@ -0,0 +1,25 @@
|
|||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const depgraph_1 = require("../depgraph");
|
||||
const path_1 = require("../path");
|
||||
const local_1 = require("../local");
|
||||
(function () {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const ordered = depgraph_1.getOrdered(true);
|
||||
local_1.updateJson(path_1.resolve("tsconfig.project.json"), {
|
||||
references: ordered.map((name) => ({ path: ("./packages/" + name) }))
|
||||
});
|
||||
});
|
||||
})().catch((error) => {
|
||||
console.log(error);
|
||||
process.exit(1);
|
||||
});
|
|
@@ -0,0 +1,5 @@
|
|||
declare const fs: any;
|
||||
declare const resolve: any;
|
||||
declare const sourceEthers: any;
|
||||
declare const targets: any;
|
||||
declare const output: string;
|
|
@@ -0,0 +1,32 @@
|
|||
"use strict";
|
||||
const fs = require("fs");
|
||||
const { resolve } = require("../path");
|
||||
const sourceEthers = fs.readFileSync(resolve("packages/ethers/src.ts/ethers.ts")).toString();
|
||||
const targets = sourceEthers.match(/export\s*{\s*((.|\s)*)}/)[1].trim();
|
||||
////////////////////
|
||||
// Begin template
|
||||
////////////////////
|
||||
const output = `"use strict";
|
||||
|
||||
// To modify this file, you must update ./misc/admin/lib/cmds/update-exports.js
|
||||
|
||||
import * as ethers from "./ethers";
|
||||
|
||||
try {
|
||||
const anyGlobal = (window as any);
|
||||
|
||||
if (anyGlobal._ethers == null) {
|
||||
anyGlobal._ethers = ethers;
|
||||
}
|
||||
} catch (error) { }
|
||||
|
||||
export { ethers };
|
||||
|
||||
export {
|
||||
${targets}
|
||||
} from "./ethers";
|
||||
`;
|
||||
////////////////////
|
||||
// End template
|
||||
////////////////////
|
||||
fs.writeFileSync(resolve("packages/ethers/src.ts/index.ts"), output);
|
|
@@ -0,0 +1 @@
|
|||
export {};
|
|
@@ -0,0 +1,32 @@
|
|||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const local_1 = require("../local");
|
||||
const log_1 = require("../log");
|
||||
const path_1 = require("../path");
|
||||
(function () {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const progress = log_1.getProgressBar(log_1.colorify.bold("Updating package.json hashes"));
|
||||
// Updating all tarball hashes now that versions have been updated
|
||||
for (let i = 0; i < path_1.dirnames.length; i++) {
|
||||
progress(i / path_1.dirnames.length);
|
||||
const dirname = path_1.dirnames[i];
|
||||
const tarballHash = local_1.computeTarballHash(dirname);
|
||||
//console.log(dirname, tarballHash);
|
||||
local_1.updateJson(path_1.getPackageJsonPath(dirname), { tarballHash });
|
||||
}
|
||||
progress(1);
|
||||
});
|
||||
})().catch((error) => {
|
||||
console.log(error);
|
||||
process.exit(1);
|
||||
});
|
||||
;
|
|
@@ -0,0 +1,2 @@
|
|||
export declare function getOrdered(skipNobuild?: boolean): Array<string>;
|
||||
export declare function sort(dirnames: Array<string>): void;
|
|
@@ -0,0 +1,101 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const path_1 = require("./path");
|
||||
const local_1 = require("./local");
|
||||
class OrderedSet {
|
||||
constructor() {
|
||||
this._keys = [];
|
||||
this._values = {};
|
||||
}
|
||||
add(key) {
|
||||
this._values[key] = true;
|
||||
this._keys = null;
|
||||
}
|
||||
contains(key) {
|
||||
return !!this._values[key];
|
||||
}
|
||||
_sort() {
|
||||
if (this._keys != null) {
|
||||
return;
|
||||
}
|
||||
this._keys = Object.keys(this._values);
|
||||
this._keys.sort();
|
||||
}
|
||||
get length() {
|
||||
this._sort();
|
||||
return this._keys.length;
|
||||
}
|
||||
get(index) {
|
||||
this._sort();
|
||||
return this._keys[index];
|
||||
}
|
||||
}
|
||||
function getOrdered(skipNobuild) {
|
||||
const packages = {};
|
||||
const filenames = {};
|
||||
// Maps each package name to its list of dependencies; { [ name:string]: Array<name: string> }
|
||||
const deps = {};
|
||||
let addDeps = (name, depends) => {
|
||||
Object.keys(depends).forEach((dep) => {
|
||||
// Not a package we manage
|
||||
if (packages[dep] == null) {
|
||||
return;
|
||||
}
|
||||
deps[name].add(dep);
|
||||
});
|
||||
};
|
||||
for (let i = 0; i < path_1.dirnames.length; i++) {
|
||||
let dirname = path_1.dirnames[i];
|
||||
let info = local_1.getPackage(dirname);
|
||||
if (skipNobuild && info._ethers_nobuild) {
|
||||
continue;
|
||||
}
|
||||
packages[info.name] = info;
|
||||
filenames[info.name] = dirname;
|
||||
}
|
||||
Object.keys(packages).forEach((name) => {
|
||||
let info = packages[name];
|
||||
deps[info.name] = new OrderedSet();
|
||||
addDeps(info.name, info.dependencies || {});
|
||||
addDeps(info.name, info.devDependencies || {});
|
||||
});
|
||||
let ordered = [];
|
||||
let remaining = Object.keys(deps);
|
||||
let isSatisfied = (name) => {
|
||||
for (let i = 0; i < deps[name].length; i++) {
|
||||
if (ordered.indexOf(deps[name].get(i)) === -1) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
};
|
||||
while (remaining.length) {
|
||||
let bail = true;
|
||||
for (let i = 0; i < remaining.length; i++) {
|
||||
if (!isSatisfied(remaining[i])) {
|
||||
continue;
|
||||
}
|
||||
bail = false;
|
||||
ordered.push(remaining[i]);
|
||||
remaining.splice(i, 1);
|
||||
break;
|
||||
}
|
||||
if (bail) {
|
||||
throw new Error("Nothing processed; circular dependencies...");
|
||||
}
|
||||
}
|
||||
return ordered.map((name) => filenames[name]);
|
||||
}
|
||||
exports.getOrdered = getOrdered;
|
||||
function sort(dirnames) {
|
||||
let ordered = getOrdered();
|
||||
dirnames.sort((a, b) => {
|
||||
let ai = ordered.indexOf(local_1.getPackage(a).name);
|
||||
let bi = ordered.indexOf(local_1.getPackage(b).name);
|
||||
if (ai === -1 || bi === -1) {
|
||||
throw new Error("unknown dirname - " + [a, b].join(", "));
|
||||
}
|
||||
return ai - bi;
|
||||
});
|
||||
}
|
||||
exports.sort = sort;
|
|
@@ -0,0 +1,16 @@
|
|||
export declare type GetUrlResponse = {
|
||||
statusCode: number;
|
||||
statusMessage: string;
|
||||
headers: {
|
||||
[key: string]: string;
|
||||
};
|
||||
body: Uint8Array;
|
||||
};
|
||||
export declare type Options = {
|
||||
method?: string;
|
||||
body?: Uint8Array;
|
||||
headers?: {
|
||||
[key: string]: string;
|
||||
};
|
||||
};
|
||||
export declare function getUrl(href: string, options?: Options): Promise<GetUrlResponse>;
|
|
@@ -0,0 +1,100 @@
|
|||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const http_1 = __importDefault(require("http"));
|
||||
const https_1 = __importDefault(require("https"));
|
||||
const url_1 = require("url");
|
||||
function getResponse(request) {
|
||||
return new Promise((resolve, reject) => {
|
||||
request.once("response", (resp) => {
|
||||
const response = {
|
||||
statusCode: resp.statusCode,
|
||||
statusMessage: resp.statusMessage,
|
||||
headers: Object.keys(resp.headers).reduce((accum, name) => {
|
||||
let value = resp.headers[name];
|
||||
if (Array.isArray(value)) {
|
||||
value = value.join(", ");
|
||||
}
|
||||
accum[name] = value;
|
||||
return accum;
|
||||
}, {}),
|
||||
body: null
|
||||
};
|
||||
//resp.setEncoding("utf8");
|
||||
resp.on("data", (chunk) => {
|
||||
if (response.body == null) {
|
||||
response.body = new Uint8Array(0);
|
||||
}
|
||||
const body = new Uint8Array(response.body.length + chunk.length);
|
||||
body.set(response.body, 0);
|
||||
body.set(chunk, response.body.length);
|
||||
response.body = body;
|
||||
});
|
||||
resp.on("end", () => {
|
||||
resolve(response);
|
||||
});
|
||||
resp.on("error", (error) => {
|
||||
/* istanbul ignore next */
|
||||
error.response = response;
|
||||
reject(error);
|
||||
});
|
||||
});
|
||||
request.on("error", (error) => { reject(error); });
|
||||
});
|
||||
}
|
||||
// The URL.parse uses null instead of the empty string
|
||||
function nonnull(value) {
|
||||
if (value == null) {
|
||||
return "";
|
||||
}
|
||||
return value;
|
||||
}
|
||||
function getUrl(href, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (options == null) {
|
||||
options = {};
|
||||
}
|
||||
// @TODO: Once we drop support for node 8, we can pass the href
|
||||
// directly into request and skip adding the components
|
||||
// to this request object
|
||||
const url = url_1.parse(href);
|
||||
const request = {
|
||||
protocol: nonnull(url.protocol),
|
||||
hostname: nonnull(url.hostname),
|
||||
port: nonnull(url.port),
|
||||
path: (nonnull(url.pathname) + nonnull(url.search)),
|
||||
method: (options.method || "GET"),
|
||||
headers: (options.headers || {}),
|
||||
};
|
||||
let req = null;
|
||||
switch (nonnull(url.protocol)) {
|
||||
case "http:":
|
||||
req = http_1.default.request(request);
|
||||
break;
|
||||
case "https:":
|
||||
req = https_1.default.request(request);
|
||||
break;
|
||||
default:
|
||||
/* istanbul ignore next */
|
||||
throw new Error(`unsupported protocol ${url.protocol}`);
|
||||
}
|
||||
if (options.body) {
|
||||
req.write(Buffer.from(options.body));
|
||||
}
|
||||
req.end();
|
||||
const response = yield getResponse(req);
|
||||
return response;
|
||||
});
|
||||
}
|
||||
exports.getUrl = getUrl;
|
|
@@ -0,0 +1,18 @@
|
|||
export declare type Package = {
|
||||
dependencies: {
|
||||
[name: string]: string;
|
||||
};
|
||||
devDependencies: {
|
||||
[name: string]: string;
|
||||
};
|
||||
name: string;
|
||||
version: string;
|
||||
tarballHash: string;
|
||||
location: "remote" | "local";
|
||||
_ethers_nobuild: boolean;
|
||||
};
|
||||
export declare function getPackage(name: string): Package;
|
||||
export declare function updateJson(path: string, replace: Record<string, any>, sort?: boolean): void;
|
||||
export declare function getDependencies(name?: string, filter?: (name: string) => boolean): Record<string, string>;
|
||||
export declare function getPackList(name: string): Array<string>;
|
||||
export declare function computeTarballHash(name: string): string;
|
|
@@ -0,0 +1,135 @@
|
|||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs_1 = __importDefault(require("fs"));
|
||||
const path_1 = require("./path");
|
||||
const run_1 = require("./run");
|
||||
const utils_1 = require("./utils");
|
||||
function getPackage(name) {
|
||||
const value = utils_1.loadJson(path_1.getPackageJsonPath(name));
|
||||
return {
|
||||
name: value.name,
|
||||
version: value.version,
|
||||
dependencies: (value.dependencies || {}),
|
||||
devDependencies: (value.devDependencies || {}),
|
||||
location: "local",
|
||||
tarballHash: (value.tarballHash || null),
|
||||
_ethers_nobuild: !!value._ethers_nobuild,
|
||||
};
|
||||
}
|
||||
exports.getPackage = getPackage;
|
||||
function updateJson(path, replace, sort) {
|
||||
const values = utils_1.loadJson(path);
|
||||
Object.keys(replace).forEach((key) => {
|
||||
const value = replace[key];
|
||||
if (value === undefined) {
|
||||
delete values[key];
|
||||
}
|
||||
else {
|
||||
values[key] = replace[key];
|
||||
}
|
||||
});
|
||||
utils_1.saveJson(path, values, !!sort);
|
||||
}
|
||||
exports.updateJson = updateJson;
|
||||
function getDependencies(name, filter) {
|
||||
if (name) {
|
||||
return utils_1.sortRecords(getPackage(name).dependencies);
|
||||
}
|
||||
// Find all versions for each package dependency
|
||||
const deps = path_1.dirnames.reduce((accum, dirname) => {
|
||||
const deps = getPackage(dirname).dependencies;
|
||||
Object.keys(deps).forEach((name) => {
|
||||
if (filter && !filter(name)) {
|
||||
return;
|
||||
}
|
||||
if (!accum[name]) {
|
||||
accum[name] = {};
|
||||
}
|
||||
accum[name][deps[name]] = true;
|
||||
});
|
||||
return accum;
|
||||
}, {});
|
||||
// Make sure each package dependency only has 1 version
|
||||
return utils_1.sortRecords(Object.keys(deps).reduce((accum, name) => {
|
||||
const versions = Object.keys(deps[name]);
|
||||
if (versions.length > 1) {
|
||||
throw new Error(`cannot depend on multiple versions for ${JSON.stringify(name)}: ${versions.map(v => JSON.stringify(v)).join(", ")}`);
|
||||
}
|
||||
accum[name] = versions[0];
|
||||
return accum;
|
||||
}, {}));
|
||||
}
|
||||
exports.getDependencies = getDependencies;
|
||||
function getPackList(name) {
|
||||
const result = run_1.run("npm", ["pack", "--json", path_1.getPackagePath(name), "--dry-run"]);
|
||||
if (!result.ok) {
|
||||
const error = new Error(`failed to run npm pack: ${name}`);
|
||||
error.result = result;
|
||||
throw error;
|
||||
}
|
||||
return JSON.parse(result.stdout)[0].files.map((info) => info.path);
|
||||
}
|
||||
exports.getPackList = getPackList;
|
||||
/*
|
||||
export function getTarball(name: string): Buffer {
|
||||
const files = getPackList(name).map((name) => `./${ name }`);
|
||||
files.sort((a, b) => {
|
||||
|
||||
const compsA = a.split("/"), compsB = b.split("/");
|
||||
while (true) {
|
||||
const a = compsA.shift(), b = compsB.shift();
|
||||
if (a === b) { continue; }
|
||||
|
||||
if (compsA.length === 0 && compsB.length === 0) {
|
||||
if (a < b) { return -1; }
|
||||
if (a > b) { return 1; }
|
||||
break;
|
||||
}
|
||||
|
||||
if (compsA.length === 0) { return -1; }
|
||||
if (compsB.length === 0) { return 1; }
|
||||
|
||||
if (a < b) { return -1; }
|
||||
if (a > b) { return 1; }
|
||||
}
|
||||
|
||||
return 0;
|
||||
});
|
||||
|
||||
return tar.create({
|
||||
sync: true,
|
||||
cwd: getPackagePath(name),
|
||||
prefix: "package/",
|
||||
gzip: true,
|
||||
portable: true,
|
||||
// Provide a specific date in the 1980s for the benefit of zip,
|
||||
// which is confounded by files dated at the Unix epoch 0.
|
||||
mtime: new Date('1985-10-26T08:15:00.000Z'),
|
||||
}, files).read();
|
||||
}
|
||||
*/
|
||||
function computeTarballHash(name) {
|
||||
// Sort the files to get a consistent hash
|
||||
const files = getPackList(name);
|
||||
files.sort();
|
||||
// Compute the hash for each file
|
||||
const packageRoot = path_1.getPackagePath(name);
|
||||
const hashes = files.reduce((accum, filename) => {
|
||||
let content = fs_1.default.readFileSync(path_1.resolve(packageRoot, filename));
|
||||
// The package.json includes the hash, so we need to nix it to get a consistent hash
|
||||
if (filename === "package.json") {
|
||||
const info = JSON.parse(content.toString());
|
||||
delete info.tarballHash;
|
||||
content = Buffer.from(JSON.stringify(info, null, 2));
|
||||
}
|
||||
accum[filename] = utils_1.sha256(content);
|
||||
return accum;
|
||||
}, {});
|
||||
return utils_1.sha256(Buffer.from("{" + files.map((filename) => {
|
||||
return `${JSON.stringify(filename)}:"${hashes[filename]}"`;
|
||||
}).join(",") + "}"));
|
||||
}
|
||||
exports.computeTarballHash = computeTarballHash;
|
|
@@ -0,0 +1,5 @@
|
|||
export declare function getProgressBar(action: string): (percent: number) => void;
|
||||
export declare type ColorifyFunc = (text: string) => string;
|
||||
export declare const colorify: {
|
||||
[format: string]: ColorifyFunc;
|
||||
};
|
|
@@ -0,0 +1,59 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
// See: https://stackoverflow.com/questions/9781218/how-to-change-node-jss-console-font-color
|
||||
let disableColor = !(process.stdout.isTTY);
|
||||
function getProgressBar(action) {
|
||||
let lastProgress = -1;
|
||||
return function (percent) {
|
||||
const progress = Math.trunc(percent * 100);
|
||||
if (disableColor) {
|
||||
if (lastProgress === -1) {
|
||||
console.log(action + "...");
|
||||
}
|
||||
lastProgress = progress;
|
||||
return;
|
||||
}
|
||||
//process.stdin.setRawMode(false);
|
||||
//process.stdin.pause();
|
||||
if (progress === lastProgress || lastProgress === 1) {
|
||||
return;
|
||||
}
|
||||
lastProgress = progress;
|
||||
(process.stdout).clearLine();
|
||||
(process.stdout).cursorTo(0);
|
||||
process.stdout.write(action + "... " + progress + "%");
|
||||
if (percent === 1) {
|
||||
process.stdout.write('\n');
|
||||
}
|
||||
};
|
||||
}
|
||||
exports.getProgressBar = getProgressBar;
|
||||
const colorSequences = {
|
||||
blue: "\x1b[34m",
|
||||
cyan: "\x1b[36m",
|
||||
green: "\x1b[32m",
|
||||
magenta: "\x1b[35m",
|
||||
red: "\x1b[31m",
|
||||
yellow: "\x1b[33m",
|
||||
bold: ""
|
||||
};
|
||||
function getColor(color) {
|
||||
if (!color || color === "normal") {
|
||||
return "\x1b[0m";
|
||||
}
|
||||
return "\x1b[1m" + colorSequences[color];
|
||||
}
|
||||
function _colorify(format) {
|
||||
return function (text) {
|
||||
if (disableColor) {
|
||||
return text;
|
||||
}
|
||||
return getColor(format) + text.replace(/[^ -~]+/g, "") + getColor();
|
||||
};
|
||||
}
|
||||
exports.colorify = Object.freeze({
|
||||
bold: _colorify("bold"),
|
||||
blue: _colorify("blue"),
|
||||
green: _colorify("green"),
|
||||
red: _colorify("red"),
|
||||
});
|
|
@@ -0,0 +1,2 @@
|
|||
import { Package } from "./local";
|
||||
export declare function getPackage(name: string, version?: string): Promise<Package>;
|
|
@@ -0,0 +1,61 @@
|
|||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const semver_1 = __importDefault(require("semver"));
|
||||
const geturl_1 = require("./geturl");
|
||||
const local_1 = require("./local");
|
||||
const cache = {};
|
||||
function getPackageInfo(name) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
// Convert dirname to package if needed
|
||||
name = local_1.getPackage(name).name;
|
||||
if (!cache[name]) {
|
||||
try {
|
||||
const result = yield geturl_1.getUrl("http:/" + "/registry.npmjs.org/" + name);
|
||||
cache[name] = JSON.parse(Buffer.from(result.body).toString("utf8"));
|
||||
}
|
||||
catch (error) {
|
||||
if (error.status === 404) {
|
||||
return null;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
return cache[name] || null;
|
||||
});
|
||||
}
|
||||
function getPackage(name, version) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const infos = yield getPackageInfo(name);
|
||||
if (infos == null) {
|
||||
return null;
|
||||
}
|
||||
if (version == null) {
|
||||
const versions = Object.keys(infos.versions);
|
||||
versions.sort(semver_1.default.compare);
|
||||
version = versions.pop();
|
||||
}
|
||||
const info = infos.versions[version];
|
||||
return {
|
||||
dependencies: (info.dependencies || {}),
|
||||
devDependencies: (info.devDependencies || {}),
|
||||
location: "remote",
|
||||
name: info.name,
|
||||
tarballHash: info.tarballHash,
|
||||
version: info.version,
|
||||
_ethers_nobuild: !!info._ethers_nobuild,
|
||||
};
|
||||
});
|
||||
}
|
||||
exports.getPackage = getPackage;
|
|
@@ -1,27 +1,13 @@
|
|||
export declare const root: string;
|
||||
export declare function resolve(...args: Array<string>): string;
|
||||
export declare const dirs: Readonly<{
|
||||
rootPackageJsonPath: string;
|
||||
packages: string;
|
||||
root: string;
|
||||
}>;
|
||||
export declare type Package = {
|
||||
dependencies: {
|
||||
[name: string]: string;
|
||||
};
|
||||
devDependencies: {
|
||||
[name: string]: string;
|
||||
};
|
||||
name: string;
|
||||
version: string;
|
||||
};
|
||||
export declare const dirnames: ReadonlyArray<string>;
|
||||
export declare const packages: ReadonlyArray<string>;
|
||||
export declare function atomicWrite(path: string, value: string | Uint8Array): void;
|
||||
export declare function loadJson(path: string): any;
|
||||
export declare function getPackagePath(name: string): string;
|
||||
export declare function getDirname(name: string): string;
|
||||
export declare function getPackageJsonPath(name: string): string;
|
||||
export declare function getPackage(name: string): Package;
|
||||
export declare function getDependencies(name?: string): Record<string, string>;
|
||||
export declare function isEthers(name: string): boolean;
|
||||
export declare function updateJson(path: string, replace: Record<string, any>, sort?: boolean): void;
|
||||
|
|
|
@@ -6,8 +6,13 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|||
const fs_1 = __importDefault(require("fs"));
|
||||
const path_1 = require("path");
|
||||
exports.root = path_1.resolve(__dirname, "../../../");
|
||||
const pathRootPackageJsonPath = path_1.resolve(exports.root, "package.json");
|
||||
const pathPackages = path_1.resolve(exports.root, "packages");
|
||||
function resolve(...args) {
|
||||
args.unshift(exports.root);
|
||||
return path_1.resolve.apply(null, args);
|
||||
}
|
||||
exports.resolve = resolve;
|
||||
const pathRootPackageJsonPath = resolve("package.json");
|
||||
const pathPackages = resolve("packages");
|
||||
exports.dirs = Object.freeze({
|
||||
rootPackageJsonPath: pathRootPackageJsonPath,
|
||||
packages: pathPackages,
|
||||
|
@@ -26,16 +31,6 @@ const packageLookup = exports.dirnames.reduce((accum, dirname) => {
|
|||
return accum;
|
||||
}, {});
|
||||
exports.packages = Object.freeze(exports.dirnames.map((dirname) => packageLookup[dirname].packageName));
|
||||
function atomicWrite(path, value) {
|
||||
const tmp = path_1.resolve(exports.dirs.root, ".atomic-tmp");
|
||||
fs_1.default.writeFileSync(tmp, value);
|
||||
fs_1.default.renameSync(tmp, path);
|
||||
}
|
||||
exports.atomicWrite = atomicWrite;
|
||||
function loadJson(path) {
|
||||
return JSON.parse(fs_1.default.readFileSync(path).toString());
|
||||
}
|
||||
exports.loadJson = loadJson;
|
||||
function getPackageInfo(name) {
|
||||
const value = packageLookup[name];
|
||||
if (!value) {
|
||||
|
@@ -55,73 +50,7 @@ function getPackageJsonPath(name) {
|
|||
return getPackageInfo(name).packageJsonPath;
|
||||
}
|
||||
exports.getPackageJsonPath = getPackageJsonPath;
|
||||
function getPackage(name) {
|
||||
const value = loadJson(getPackageJsonPath(name));
|
||||
return {
|
||||
name: value.name,
|
||||
version: value.version,
|
||||
dependencies: (value.dependencies || {}),
|
||||
devDependencies: (value.dependencies || {}),
|
||||
};
|
||||
}
|
||||
exports.getPackage = getPackage;
|
||||
function sortRecords(record) {
|
||||
const keys = Object.keys(record);
|
||||
keys.sort();
|
||||
return keys.reduce((accum, name) => {
|
||||
accum[name] = record[name];
|
||||
return accum;
|
||||
}, {});
|
||||
}
|
||||
function getDependencies(name) {
|
||||
if (name) {
|
||||
return sortRecords(getPackage(name).dependencies);
|
||||
}
|
||||
// Find all versions for each package dependency
|
||||
const deps = exports.dirnames.reduce((accum, dirname) => {
|
||||
const deps = getPackage(dirname).dependencies;
|
||||
Object.keys(deps).forEach((name) => {
|
||||
if (!accum[name]) {
|
||||
accum[name] = {};
|
||||
}
|
||||
accum[name][deps[name]] = true;
|
||||
});
|
||||
return accum;
|
||||
}, {});
|
||||
// Make sure each package dependency only has 1 version
|
||||
return sortRecords(Object.keys(deps).reduce((accum, name) => {
|
||||
const versions = Object.keys(deps[name]);
|
||||
if (versions.length > 1) {
|
||||
throw new Error(`cannot depend on multiple versions for ${JSON.stringify(name)}: ${versions.map(v => JSON.stringify(v)).join(", ")}`);
|
||||
}
|
||||
accum[name] = versions[0];
|
||||
return accum;
|
||||
}, {}));
|
||||
}
|
||||
exports.getDependencies = getDependencies;
|
||||
function isEthers(name) {
|
||||
return !!packageLookup[name];
|
||||
}
|
||||
exports.isEthers = isEthers;
|
||||
function updateJson(path, replace, sort) {
|
||||
const values = loadJson(path);
|
||||
Object.keys(replace).forEach((key) => {
|
||||
values[key] = replace[key];
|
||||
});
|
||||
let replacer = null;
|
||||
if (sort) {
|
||||
replacer = (key, value) => {
|
||||
if (typeof (value) === "object") {
|
||||
const keys = Object.keys(value);
|
||||
keys.sort();
|
||||
return keys.reduce((accum, key) => {
|
||||
accum[key] = value[key];
|
||||
return accum;
|
||||
}, {});
|
||||
}
|
||||
return value;
|
||||
};
|
||||
}
|
||||
atomicWrite(path, JSON.stringify(values, replacer, 2));
|
||||
}
|
||||
exports.updateJson = updateJson;
|
||||
|
|
|
@@ -0,0 +1,10 @@
|
|||
/// <reference types="node" />
|
||||
export declare type RunResult = {
|
||||
stderr: string | null;
|
||||
_stderr: string | Buffer;
|
||||
stdout: string;
|
||||
_stdout: string | Buffer;
|
||||
status: number;
|
||||
ok: boolean;
|
||||
};
|
||||
export declare function run(progname: string, args?: Array<string>, currentWorkingDirectory?: string): RunResult;
|
|
@@ -0,0 +1,64 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const child_process_1 = require("child_process");
|
||||
function run(progname, args, currentWorkingDirectory) {
|
||||
if (args == null) {
|
||||
args = [];
|
||||
}
|
||||
const options = {};
|
||||
if (currentWorkingDirectory) {
|
||||
options.cwd = currentWorkingDirectory;
|
||||
}
|
||||
const child = child_process_1.spawnSync(progname, args, options);
|
||||
const result = {
|
||||
_stderr: child.stderr,
|
||||
stderr: (child.stderr.toString() || null),
|
||||
_stdout: child.stdout,
|
||||
stdout: child.stdout.toString(),
|
||||
status: child.status,
|
||||
ok: (child.stderr.length === 0 && child.status === 0)
|
||||
};
|
||||
if (child.error) {
|
||||
(child.error).result = result;
|
||||
throw child.error;
|
||||
}
|
||||
return result;
|
||||
/*
|
||||
const result: RunResult = {
|
||||
stderr: null,
|
||||
_stderr: Buffer.from([]),
|
||||
stdout: null,
|
||||
_stdout: Buffer.from([]),
|
||||
status: null,
|
||||
ok: false,
|
||||
};
|
||||
|
||||
proc.stderr.on("data", (data) => {
|
||||
result._stderr = Buffer.concat([ result._stderr, data ]);
|
||||
});
|
||||
|
||||
proc.stdout.on("data", (data) => {
|
||||
result._stdout = Buffer.concat([ result._stdout, data ]);
|
||||
});
|
||||
|
||||
proc.on("error", (error) => {
|
||||
result.stderr = result._stderr.toString("utf8");
|
||||
result.stdout = result._stdout.toString("utf8");
|
||||
(<any>error).result = result;
|
||||
|
||||
console.log("Error:", error);
|
||||
|
||||
reject(error);
|
||||
});
|
||||
|
||||
proc.on("close", (code) => {
|
||||
result.stderr = result._stderr.toString("utf8");
|
||||
result.stdout = result._stdout.toString("utf8");
|
||||
result.status = code;
|
||||
result.ok = (result._stderr.length === 0 && code === 0);
|
||||
resolve(result);
|
||||
});
|
||||
});
|
||||
*/
|
||||
}
|
||||
exports.run = run;
|
|
@@ -0,0 +1,7 @@
|
|||
/// <reference types="node" />
|
||||
export declare function repeat(char: string, length: number): string;
|
||||
export declare function sha256(content: Buffer): string;
|
||||
export declare function sortRecords(record: Record<string, any>): Record<string, any>;
|
||||
export declare function atomicWrite(path: string, value: string | Uint8Array): void;
|
||||
export declare function loadJson(path: string): any;
|
||||
export declare function saveJson(filename: string, data: any, sort?: boolean): any;
|
|
@@ -0,0 +1,65 @@
|
|||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs_1 = __importDefault(require("fs"));
|
||||
const path_1 = require("path");
|
||||
const crypto_1 = require("crypto");
|
||||
function repeat(char, length) {
|
||||
if (char.length === 0) {
|
||||
return "";
|
||||
}
|
||||
let output = char;
|
||||
while (output.length < length) {
|
||||
output = output + output;
|
||||
}
|
||||
return output.substring(0, length);
|
||||
}
|
||||
exports.repeat = repeat;
|
||||
function sha256(content) {
|
||||
const hasher = crypto_1.createHash("sha256");
|
||||
hasher.update(content);
|
||||
return "0x" + hasher.digest("hex");
|
||||
}
|
||||
exports.sha256 = sha256;
|
||||
function sortRecords(record) {
|
||||
const keys = Object.keys(record);
|
||||
keys.sort();
|
||||
return keys.reduce((accum, name) => {
|
||||
accum[name] = record[name];
|
||||
return accum;
|
||||
}, {});
|
||||
}
|
||||
exports.sortRecords = sortRecords;
|
||||
function atomicWrite(path, value) {
|
||||
const tmp = path_1.resolve(__dirname, "../../../.atomic-tmp");
|
||||
fs_1.default.writeFileSync(tmp, value);
|
||||
fs_1.default.renameSync(tmp, path);
|
||||
}
|
||||
exports.atomicWrite = atomicWrite;
|
||||
function loadJson(path) {
|
||||
return JSON.parse(fs_1.default.readFileSync(path).toString());
|
||||
}
|
||||
exports.loadJson = loadJson;
|
||||
function saveJson(filename, data, sort) {
|
||||
let replacer = undefined;
|
||||
if (sort) {
|
||||
replacer = (key, value) => {
|
||||
if (Array.isArray(value)) {
|
||||
// pass
|
||||
}
|
||||
else if (value && typeof (value) === "object") {
|
||||
const keys = Object.keys(value);
|
||||
keys.sort();
|
||||
return keys.reduce((accum, key) => {
|
||||
accum[key] = value[key];
|
||||
return accum;
|
||||
}, {});
|
||||
}
|
||||
return value;
|
||||
};
|
||||
}
|
||||
atomicWrite(filename, JSON.stringify(data, replacer, 2) + "\n");
|
||||
}
|
||||
exports.saveJson = saveJson;
|
|
@ -0,0 +1,47 @@
|
|||
import { dirnames, getPackageJsonPath, resolve } from "./path";
|
||||
import { loadJson, saveJson } from "./utils";
|
||||
|
||||
function setupConfig(outDir: string, moduleType: string, targetType: string) {
|
||||
|
||||
// Configure the tsconfig.package.json...
|
||||
const path = resolve("tsconfig.package.json");
|
||||
const content = loadJson(path);
|
||||
content.compilerOptions.module = moduleType;
|
||||
content.compilerOptions.target = targetType;
|
||||
saveJson(path, content, true);
|
||||
|
||||
// Configure the browser field for every package, copying the
|
||||
// browser.umd field for UMD and browser.esm for ESM
|
||||
dirnames.forEach((dirname) => {
|
||||
const filename = getPackageJsonPath(dirname);
|
||||
const info = loadJson(filename);
|
||||
|
||||
if (info._ethers_nobuild) { return; }
|
||||
|
||||
if (targetType === "es2015") {
|
||||
if (info["browser.esm"]) {
|
||||
info.browser = info["browser.esm"];
|
||||
}
|
||||
} else if (targetType === "es5") {
|
||||
if (info["browser.umd"]) {
|
||||
info.browser = info["browser.umd"];
|
||||
}
|
||||
} else {
|
||||
throw new Error("unsupported target");
|
||||
}
|
||||
saveJson(filename, info, true);
|
||||
|
||||
let path = resolve("packages", dirname, "tsconfig.json");
|
||||
let content = loadJson(path);
|
||||
content.compilerOptions.outDir = outDir;
|
||||
saveJson(path, content, true);
|
||||
});
|
||||
}
|
||||
|
||||
export function setupBuild(buildModule: boolean): void {
|
||||
if (buildModule) {
|
||||
setupConfig("./lib.esm/", "es2015", "es2015");
|
||||
} else {
|
||||
setupConfig("./lib/", "commonjs", "es5");
|
||||
}
|
||||
}
|
|
@ -0,0 +1,2 @@
|
|||
|
||||
export function foo() { }
|
|
@ -0,0 +1,75 @@
|
|||
import fs from "fs";
|
||||
|
||||
import semver from "semver";
|
||||
|
||||
import { dirnames, getPackageJsonPath, getPackagePath, resolve } from "../path";
|
||||
import * as local from "../local";
|
||||
import { colorify, getProgressBar } from "../log";
|
||||
import * as npm from "../npm";
|
||||
import { loadJson, repeat, saveJson } from "../utils";
|
||||
|
||||
(async function() {
|
||||
const progress = getProgressBar(colorify.bold("Bumping package.json versions"));
|
||||
|
||||
const latestVersions: Record<string, string> = { };
|
||||
let updated = false;
|
||||
|
||||
const output: Array<string> = [ ];
|
||||
|
||||
// For each package, detect diff between tarball and remote
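// (Clarifying note, not part of the original commit: "diff" here means
// comparing the freshly computed local tarball hash against the tarballHash
// recorded in the published package.json; a mismatch bumps the patch version.)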
|
||||
for (let i = 0; i < dirnames.length; i++) {
|
||||
progress(i / dirnames.length);
|
||||
|
||||
const dirname = dirnames[i];
|
||||
|
||||
const pLocal = local.getPackage(dirname);
|
||||
const pNpm = await npm.getPackage(dirname);
|
||||
|
||||
const tarballHash = local.computeTarballHash(dirname);
|
||||
|
||||
let version = pNpm.version;
|
||||
|
||||
if (tarballHash !== pNpm.tarballHash) {
|
||||
version = semver.inc(version, "patch");
|
||||
|
||||
output.push([
|
||||
" ",
|
||||
colorify.blue(pLocal.name),
|
||||
repeat(" ", 47 - pLocal.name.length - pNpm.version.length),
|
||||
pNpm.version,
|
||||
colorify.bold(" => "),
|
||||
colorify.green(version)
|
||||
].join(""));
|
||||
|
||||
local.updateJson(getPackageJsonPath(dirname), { tarballHash, version });
|
||||
|
||||
updated = true;
|
||||
}
|
||||
|
||||
latestVersions[pLocal.name] = version;
|
||||
|
||||
// Write out the _version.ts
|
||||
if (!pLocal._ethers_nobuild) {
|
||||
const code = "export const version = " + JSON.stringify(dirname + "/" + pLocal.version) + ";\n";
|
||||
fs.writeFileSync(resolve(getPackagePath(dirname), "src.ts/_version.ts"), code);
|
||||
}
|
||||
}
|
||||
progress(1);
|
||||
|
||||
if (updated) {
|
||||
const filename = resolve("packages/ethers/package.json")
|
||||
const info = loadJson(filename);
|
||||
Object.keys(info.dependencies).forEach((name) => {
|
||||
const version = latestVersions[name];
|
||||
if (version == null) { return; }
|
||||
info.dependencies[name] = version;
|
||||
});
|
||||
saveJson(filename, info);
|
||||
}
|
||||
|
||||
output.forEach((line) => { console.log(line); });
|
||||
|
||||
})().catch((error) => {
|
||||
console.log(`Error running ${ process.argv[0] }: ${ error.message }`);
|
||||
process.exit(1);
|
||||
});
|
|
@ -1,15 +1,13 @@
|
|||
|
||||
import { dirs, getDependencies, isEthers, updateJson } from "../path";
|
||||
import { dirs, isEthers } from "../path";
|
||||
import { getDependencies, updateJson } from "../local";
|
||||
|
||||
(async function() {
|
||||
const deps = getDependencies();
|
||||
|
||||
const dependencies = Object.keys(deps).reduce((accum, name) => {
|
||||
if (!isEthers(name)) {
|
||||
accum[name] = deps[name];
|
||||
}
|
||||
return accum;
|
||||
}, <{ [ name: string ]: string }>{ });
|
||||
|
||||
const dependencies = getDependencies(null, (name: string) => {
|
||||
return !isEthers(name);
|
||||
});
|
||||
updateJson(dirs.rootPackageJsonPath, { dependencies });
|
||||
})();
|
||||
})().catch((error) => {
|
||||
console.log(`Error running ${ process.argv[0] }: ${ error.message }`);
|
||||
process.exit(1);
|
||||
});
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
import fs from "fs";
|
||||
import { dirname, resolve } from "path";
|
||||
|
||||
import { dirs, getDependencies, getDirname, getPackagePath, packages } from "../path";
|
||||
import { dirs, getDirname, getPackagePath, packages } from "../path";
|
||||
import { getDependencies } from "../local";
|
||||
|
||||
function link(existing: string, path: string): void {
|
||||
try {
|
||||
|
@ -45,4 +46,7 @@ function link(existing: string, path: string): void {
|
|||
});
|
||||
});
|
||||
|
||||
})();
|
||||
})().catch((error) => {
|
||||
console.log(`Error running ${ process.argv[0] }: ${ error.message }`);
|
||||
process.exit(1);
|
||||
});
|
||||
|
|
|
@ -0,0 +1,16 @@
|
|||
"use strict";
|
||||
|
||||
const { major } = require("semver");
|
||||
|
||||
// This should be used like `node npm-skip-node8 || COMMAND`.
|
||||
// - If node 8, this script returns true, skipping COMMAND
|
||||
// - Otherwise, return false, running COMMAND
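//
// A hypothetical usage sketch (not part of this commit) for a package.json
// script that needs newer Node; the script path and build command below are
// illustrative assumptions only:
//   "build-esm": "node ./npm-skip-node8.js || npm run _build-esm"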
|
||||
|
||||
if (major(process.version) > 8) {
|
||||
// Node >8; return "false" (w.r.t. shell scripting)
|
||||
process.exit(1);
|
||||
} else {
|
||||
// Node 8; return "true" (w.r.t. shell scripting)
|
||||
process.exit(0);
|
||||
}
|
||||
|
|
@ -0,0 +1,105 @@
|
|||
import fs from "fs";
|
||||
import { createServer, Server } from "http";
|
||||
import { resolve } from "path";
|
||||
|
||||
export function getMime(filename: string): string {
|
||||
switch (filename.split('.').pop().toLowerCase()) {
|
||||
case 'css': return 'text/css';
|
||||
case 'doctree': return 'application/x-doctree';
|
||||
case 'eot': return 'application/vnd.ms-fontobject';
|
||||
case 'gif': return 'image/gif';
|
||||
case 'html': return 'text/html';
|
||||
case 'js': return 'application/javascript';
|
||||
case 'jpg': return 'image/jpeg';
|
||||
case 'jpeg': return 'image/jpeg';
|
||||
case 'md': return 'text/markdown';
|
||||
case 'pickle': return 'application/x-pickle';
|
||||
case 'png': return 'image/png';
|
||||
case 'svg': return 'image/svg+xml';
|
||||
case 'ttf': return 'application/x-font-ttf';
|
||||
case 'txt': return 'text/plain';
|
||||
case 'woff': return 'application/font-woff';
|
||||
}
|
||||
console.log('NO MIME', filename);
|
||||
|
||||
return "application/octet-stream";
|
||||
}
|
||||
|
||||
export type Options = {
|
||||
port?: number;
|
||||
redirects?: Record<string, string>;
|
||||
};
|
||||
|
||||
export function start(root: string, options: Options): Server {
|
||||
if (root == null) { throw new Error("root required"); }
|
||||
if (options == null) { options = { }; }
|
||||
if (options.port == null) { options.port = 8000; }
|
||||
root = resolve(root);
|
||||
|
||||
const server = createServer((req, resp) => {
|
||||
|
||||
// Follow redirects in options
|
||||
if (options.redirects && options.redirects[req.url]) {
|
||||
resp.writeHead(301, { Location: options.redirects[req.url] });
|
||||
resp.end();
|
||||
return;
|
||||
}
|
||||
|
||||
let filename = resolve(root, "." + req.url);
|
||||
|
||||
// Make sure we aren't crawling out of our sandbox
|
||||
if (req.url[0] !== "/" || filename.substring(0, root.length) !== root) {
|
||||
resp.writeHead(403);
|
||||
resp.end();
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const stat = fs.statSync(filename);
|
||||
if (stat.isDirectory()) {
|
||||
|
||||
// Redirect bare directory to its path (i.e. "/foo" => "/foo/")
|
||||
if (req.url[req.url.length - 1] !== "/") {
|
||||
resp.writeHead(301, { Location: req.url + "/" });
|
||||
resp.end();
|
||||
return;
|
||||
}
|
||||
|
||||
filename += "/index.html";
|
||||
}
|
||||
|
||||
const content = fs.readFileSync(filename);
|
||||
|
||||
resp.writeHead(200, {
|
||||
"Content-Length": content.length,
|
||||
"Content-Type": getMime(filename)
|
||||
});
|
||||
resp.end(content);
|
||||
return;
|
||||
|
||||
} catch (error) {
|
||||
if (error.code === "ENOENT") {
|
||||
resp.writeHead(404, { });
|
||||
resp.end();
|
||||
return;
|
||||
}
|
||||
|
||||
resp.writeHead(500, { });
|
||||
resp.end();
|
||||
return;
|
||||
}
|
||||
});
|
||||
|
||||
server.listen(options.port, () => {
|
||||
console.log(`Server running on: http://localhost:${ options.port }`);
|
||||
});
|
||||
|
||||
return server;
|
||||
}
|
||||
|
||||
start(resolve(__dirname, "../../docs"), {
|
||||
redirects: {
|
||||
"/": "/v5/"
|
||||
}
|
||||
});
|
||||
|
|
@ -0,0 +1,36 @@
|
|||
import { setupBuild } from "../build";
|
||||
import { colorify } from "../log";
|
||||
import { getPackageJsonPath } from "../path";
|
||||
import { loadJson, saveJson } from "../utils";
|
||||
|
||||
(async function() {
|
||||
process.argv.slice(2).forEach((arg) => {
|
||||
console.log(colorify.bold("Setting Option:"), arg);
|
||||
switch(arg) {
|
||||
case "esm":
|
||||
setupBuild(true);
|
||||
break;
|
||||
|
||||
case "cjs":
|
||||
setupBuild(false);
|
||||
break;
|
||||
|
||||
// This will remove the browser field entirely, so make sure
|
||||
// to set esm or cjs first as they will restore the browser
|
||||
// field
|
||||
case "browser-lang-all": {
|
||||
const filename = getPackageJsonPath("wordlists");
|
||||
const info = loadJson(filename);
|
||||
delete info.browser;
|
||||
saveJson(filename, info, true);
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown option: ${ JSON.stringify(arg) }`);
|
||||
}
|
||||
});
|
||||
})().catch((error) => {
|
||||
console.log(`Error running ${ process.argv[0] }: ${ error.message }`);
|
||||
process.exit(1);
|
||||
});
|
|
@ -1,12 +1,14 @@
|
|||
"use strict";
|
||||
|
||||
const { resolve } = require("path");
|
||||
const fs = require("fs");
|
||||
import fs from "fs";
|
||||
import { resolve } from "path";
|
||||
|
||||
import ts from "typescript";
|
||||
|
||||
const Words = fs.readFileSync("/usr/share/dict/words").toString().split("\n").reduce((accum, word) => {
|
||||
accum[word.toLowerCase()] = true;
|
||||
return accum;
|
||||
}, { });
|
||||
}, <Record<string, boolean>>{ });
|
||||
|
||||
`
|
||||
// Words missing from the dictionary
|
||||
|
@ -79,20 +81,28 @@ OYAa IJBEJqXZJ
|
|||
Words[word.toLowerCase()] = true;
|
||||
});
|
||||
|
||||
const ts = require("typescript");
|
||||
type Bar = {
|
||||
value: string;
|
||||
lineNo: number;
|
||||
}
|
||||
|
||||
function getStrings(source) {
|
||||
const sourceFile = ts.createSourceFile("filename.ts", source);
|
||||
type Foo = {
|
||||
filename: string;
|
||||
values: Array<Bar>;
|
||||
};
|
||||
|
||||
const result = [ ];
|
||||
function getStrings(source: string): Array<Bar> {
|
||||
const sourceFile = ts.createSourceFile("filename.ts", source, ts.ScriptTarget.Latest);
|
||||
|
||||
function add(value, pos) {
|
||||
const result: Array<Bar> = [ ];
|
||||
|
||||
function add(value: string, pos: number): void {
|
||||
const lineNo = sourceFile.getLineAndCharacterOfPosition(pos).line + 1;
|
||||
result.push({ value, lineNo });
|
||||
}
|
||||
|
||||
let lastClass = null, lastEnum = null;
|
||||
function visit(node, depth) {
|
||||
//let lastClass = null, lastEnum = null;
|
||||
function visit(node: ts.Node, depth: number): void {
|
||||
switch (node.kind) {
|
||||
//case ts.SyntaxKind.TemplateExpression:
|
||||
// if (node.head) { visit(node.head); }
|
||||
|
@ -103,7 +113,7 @@ function getStrings(source) {
|
|||
case ts.SyntaxKind.TemplateTail:
|
||||
case ts.SyntaxKind.StringLiteral:
|
||||
case ts.SyntaxKind.NoSubstitutionTemplateLiteral:
|
||||
add(node.text, node.pos);
|
||||
add((<ts.LiteralLikeNode>node).text, node.pos);
|
||||
break;
|
||||
}
|
||||
|
||||
|
@ -118,10 +128,11 @@ function getStrings(source) {
|
|||
const Include = new RegExp("packages/.*/src.ts/.*\.ts$");
|
||||
const Exclude = new RegExp("/node_modules/|src.ts/.*browser.*");
|
||||
|
||||
function getAllStrings(path) {
|
||||
|
||||
function getAllStrings(path: string): Array<Foo> {
|
||||
const Root = resolve(__dirname, path);
|
||||
|
||||
const readdir = function(path) {
|
||||
const readdir = function(path: string): Array<Foo> {
|
||||
if (path.match(Exclude)) { return [ ]; }
|
||||
|
||||
const stat = fs.statSync(path);
|
||||
|
@ -145,7 +156,7 @@ function getAllStrings(path) {
|
|||
return readdir(Root);
|
||||
}
|
||||
|
||||
function checkWord(word) {
|
||||
function checkWord(word: string): boolean {
|
||||
word = word.toLowerCase();
|
||||
|
||||
// A word
|
||||
|
@ -156,20 +167,22 @@ function checkWord(word) {
|
|||
|
||||
// Hex string
|
||||
if (word.match(/^(0x)?[0-9a-f]*$/i)) { return true; }
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
function starts(text, prefix) {
|
||||
function starts(text: string, prefix: string): boolean {
|
||||
return (text.substring(0, prefix.length) === prefix);
|
||||
}
|
||||
|
||||
(async function() {
|
||||
let count = 0;
|
||||
getAllStrings(resolve(__dirname, "../../packages")).forEach((file) => {
|
||||
getAllStrings(resolve(__dirname, "../../../../packages")).forEach((file: Foo) => {
|
||||
if (starts(file.filename, "/testcases/src.ts/generation-scripts")) { return; }
|
||||
if (starts(file.filename, "/asm/src.ts/opcodes.ts")) { return; }
|
||||
|
||||
file.values.forEach((entry) => {
|
||||
function problem(word) {
|
||||
function problem(word: string): void {
|
||||
count++;
|
||||
console.log({
|
||||
filename: file.filename,
|
||||
|
@ -196,16 +209,23 @@ function starts(text, prefix) {
|
|||
value.replace(/([a-z+])([A-Z])/g, (all, first, secondLetter) => {
|
||||
return first + " " + secondLetter;
|
||||
}).replace(/((?:0x)?[A-Za-z]+)/gi, (all, word) => {
|
||||
if (checkWord(word)) { return; }
|
||||
if (checkWord(word)) { return ""; }
|
||||
problem(word);
|
||||
return "";
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
if (count) {
|
||||
console.log(`Found ${ count } typos.`);
|
||||
process.exit(1)
|
||||
}
|
||||
process.exit(0)
|
||||
})();
|
||||
|
||||
|
||||
process.exit(0)
|
||||
})().catch((error) => {
|
||||
console.log(error);
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
import { getOrdered } from "../depgraph";
|
||||
import { resolve } from "../path";
|
||||
import { updateJson } from "../local";
|
||||
|
||||
(async function() {
|
||||
const ordered = getOrdered(true);
|
||||
|
||||
updateJson(resolve("tsconfig.project.json"), {
|
||||
references: ordered.map((name) => ({ path: ("./packages/" + name) }))
|
||||
});
|
||||
})().catch((error) => {
|
||||
console.log(error);
|
||||
process.exit(1);
|
||||
});
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
"use strict";
|
||||
|
||||
const fs = require("fs");
|
||||
|
||||
const { resolve } = require("../path");
|
||||
|
||||
const sourceEthers = fs.readFileSync(resolve("packages/ethers/src.ts/ethers.ts")).toString();
|
||||
const targets = sourceEthers.match(/export\s*{\s*((.|\s)*)}/)[1].trim();
|
||||
|
||||
////////////////////
|
||||
// Begin template
|
||||
////////////////////
|
||||
|
||||
const output = `"use strict";
|
||||
|
||||
// To modify this file, you must update ./misc/admin/lib/cmds/update-exports.js
|
||||
|
||||
import * as ethers from "./ethers";
|
||||
|
||||
try {
|
||||
const anyGlobal = (window as any);
|
||||
|
||||
if (anyGlobal._ethers == null) {
|
||||
anyGlobal._ethers = ethers;
|
||||
}
|
||||
} catch (error) { }
|
||||
|
||||
export { ethers };
|
||||
|
||||
export {
|
||||
${ targets }
|
||||
} from "./ethers";
|
||||
`;
|
||||
|
||||
////////////////////
|
||||
// End template
|
||||
////////////////////
|
||||
|
||||
fs.writeFileSync(resolve("packages/ethers/src.ts/index.ts"), output);
|
|
@ -0,0 +1,22 @@
|
|||
import { computeTarballHash, updateJson } from "../local";
|
||||
import { colorify, getProgressBar } from "../log";
|
||||
import { dirnames, getPackageJsonPath } from "../path";
|
||||
|
||||
(async function() {
|
||||
const progress = getProgressBar(colorify.bold("Updating package.json hashes"));
|
||||
|
||||
// Updating all tarball hashes now that versions have been updated
|
||||
for (let i = 0; i < dirnames.length; i++) {
|
||||
progress(i / dirnames.length);
|
||||
const dirname = dirnames[i];
|
||||
const tarballHash = computeTarballHash(dirname);
|
||||
//console.log(dirname, tarballHash);
|
||||
updateJson(getPackageJsonPath(dirname), { tarballHash });
|
||||
}
|
||||
|
||||
progress(1);
|
||||
})().catch((error) => {
|
||||
console.log(error);
|
||||
process.exit(1);
|
||||
});
|
||||
|
|
@ -0,0 +1,110 @@
|
|||
|
||||
import { dirnames } from "./path";
|
||||
import { getPackage, Package } from "./local";
|
||||
|
||||
class OrderedSet {
|
||||
_keys: Array<string>;
|
||||
_values: Record<string, boolean>
|
||||
|
||||
constructor() {
|
||||
this._keys = [ ];
|
||||
this._values = { };
|
||||
}
|
||||
|
||||
add(key: string): void {
|
||||
this._values[key] = true;
|
||||
this._keys = null;
|
||||
}
|
||||
|
||||
contains(key: string): boolean {
|
||||
return !!this._values[key];
|
||||
}
|
||||
|
||||
_sort(): void {
|
||||
if (this._keys != null) { return; }
|
||||
this._keys = Object.keys(this._values);
|
||||
this._keys.sort();
|
||||
}
|
||||
|
||||
get length(): number {
|
||||
this._sort();
|
||||
return this._keys.length;
|
||||
}
|
||||
|
||||
get(index: number): string {
|
||||
this._sort();
|
||||
return this._keys[index];
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
export function getOrdered(skipNobuild?: boolean): Array<string> {
|
||||
const packages: Record<string, Package > = { };
|
||||
const filenames: Record<string, string> = { };
|
||||
|
||||
// Maps each package name to the set of managed packages it depends on
|
||||
const deps: Record<string, OrderedSet> = { };
|
||||
|
||||
let addDeps = (name: string, depends: Record<string, string>) => {
|
||||
Object.keys(depends).forEach((dep) => {
|
||||
// Not a package we manage
|
||||
if (packages[dep] == null) { return; }
|
||||
deps[name].add(dep);
|
||||
});
|
||||
}
|
||||
|
||||
for (let i = 0; i < dirnames.length; i++) {
|
||||
let dirname = dirnames[i];
|
||||
let info = getPackage(dirname);
|
||||
if (skipNobuild && info._ethers_nobuild) { continue; }
|
||||
packages[info.name] = info;
|
||||
filenames[info.name] = dirname;
|
||||
}
|
||||
|
||||
Object.keys(packages).forEach((name) => {
|
||||
let info = packages[name];
|
||||
deps[info.name] = new OrderedSet();
|
||||
addDeps(info.name, info.dependencies || { });
|
||||
addDeps(info.name, info.devDependencies || { });
|
||||
});
|
||||
|
||||
let ordered: Array<string> = [ ];
|
||||
let remaining = Object.keys(deps);
|
||||
|
||||
let isSatisfied = (name: string) => {
|
||||
for (let i = 0; i < deps[name].length; i++) {
|
||||
if (ordered.indexOf(deps[name].get(i)) === -1) { return false; }
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
while (remaining.length) {
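// (Editorial note, not part of the original commit: each pass appends any
// package whose managed dependencies are already in `ordered`; if a full
// pass adds nothing, the remaining packages form a dependency cycle.)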
|
||||
let bail = true;
|
||||
for (let i = 0; i < remaining.length; i++) {
|
||||
if (!isSatisfied(remaining[i])) { continue; }
|
||||
bail = false;
|
||||
ordered.push(remaining[i]);
|
||||
remaining.splice(i, 1);
|
||||
break;
|
||||
}
|
||||
|
||||
if (bail) {
|
||||
throw new Error("Nothing processed; circular dependencies...");
|
||||
}
|
||||
}
|
||||
|
||||
return ordered.map((name) => filenames[name]);
|
||||
}
|
||||
|
||||
export function sort(dirnames: Array<string>): void {
|
||||
let ordered = getOrdered();
|
||||
dirnames.sort((a, b) => {
|
||||
let ai = ordered.indexOf(getPackage(a).name);
|
||||
let bi = ordered.indexOf(getPackage(b).name);
|
||||
if (ai === -1 || bi === -1) {
|
||||
throw new Error("unknown dirname - " + [a, b].join(", "));
|
||||
}
|
||||
return ai - bi;
|
||||
});
|
||||
}
|
||||
|
|
@ -0,0 +1,106 @@
|
|||
import http from "http";
|
||||
import https from "https";
|
||||
import { parse } from "url"
|
||||
|
||||
export type GetUrlResponse = {
|
||||
statusCode: number,
|
||||
statusMessage: string;
|
||||
headers: { [ key: string] : string };
|
||||
body: Uint8Array;
|
||||
};
|
||||
|
||||
export type Options = {
|
||||
method?: string,
|
||||
body?: Uint8Array
|
||||
headers?: { [ key: string] : string },
|
||||
};
|
||||
|
||||
function getResponse(request: http.ClientRequest): Promise<GetUrlResponse> {
|
||||
return new Promise((resolve, reject) => {
|
||||
request.once("response", (resp: http.IncomingMessage) => {
|
||||
const response: GetUrlResponse = {
|
||||
statusCode: resp.statusCode,
|
||||
statusMessage: resp.statusMessage,
|
||||
headers: Object.keys(resp.headers).reduce((accum, name) => {
|
||||
let value = resp.headers[name];
|
||||
if (Array.isArray(value)) {
|
||||
value = value.join(", ");
|
||||
}
|
||||
accum[name] = value;
|
||||
return accum;
|
||||
}, <{ [ name: string ]: string }>{ }),
|
||||
body: null
|
||||
};
|
||||
//resp.setEncoding("utf8");
|
||||
|
||||
resp.on("data", (chunk: Uint8Array) => {
|
||||
if (response.body == null) { response.body = new Uint8Array(0); }
|
||||
|
||||
const body = new Uint8Array(response.body.length + chunk.length);
|
||||
body.set(response.body, 0);
|
||||
body.set(chunk, response.body.length);
|
||||
|
||||
response.body = body;
|
||||
});
|
||||
|
||||
resp.on("end", () => {
|
||||
resolve(response);
|
||||
});
|
||||
|
||||
resp.on("error", (error) => {
|
||||
/* istanbul ignore next */
|
||||
(<any>error).response = response;
|
||||
reject(error);
|
||||
});
|
||||
});
|
||||
|
||||
request.on("error", (error) => { reject(error); });
|
||||
});
|
||||
}
|
||||
|
||||
// url.parse() uses null instead of the empty string for missing components
|
||||
function nonnull(value: string): string {
|
||||
if (value == null) { return ""; }
|
||||
return value;
|
||||
}
|
||||
|
||||
export async function getUrl(href: string, options?: Options): Promise<GetUrlResponse> {
|
||||
if (options == null) { options = { }; }
|
||||
|
||||
// @TODO: Once we drop support for node 8, we can pass the href
|
||||
// directly into request and skip adding the components
|
||||
// to this request object
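// (Illustrative sketch only, assuming Node 10.9+ where request() accepts a
// URL string; not part of this commit. The block below could roughly become:
//   const req = (url.protocol === "https:" ? https : http).request(href, { method, headers });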
|
||||
const url = parse(href);
|
||||
|
||||
const request = {
|
||||
protocol: nonnull(url.protocol),
|
||||
hostname: nonnull(url.hostname),
|
||||
port: nonnull(url.port),
|
||||
path: (nonnull(url.pathname) + nonnull(url.search)),
|
||||
|
||||
method: (options.method || "GET"),
|
||||
headers: (options.headers || { }),
|
||||
};
|
||||
|
||||
let req: http.ClientRequest = null;
|
||||
switch (nonnull(url.protocol)) {
|
||||
case "http:":
|
||||
req = http.request(request);
|
||||
break;
|
||||
case "https:":
|
||||
req = https.request(request);
|
||||
break;
|
||||
default:
|
||||
/* istanbul ignore next */
|
||||
throw new Error(`unsupported protocol ${ url.protocol }`);
|
||||
}
|
||||
|
||||
if (options.body) {
|
||||
req.write(Buffer.from(options.body));
|
||||
}
|
||||
req.end();
|
||||
|
||||
const response = await getResponse(req);
|
||||
return response;
|
||||
}
|
||||
|
|
@ -0,0 +1,145 @@
|
|||
import fs from "fs";
|
||||
|
||||
import { dirnames, getPackageJsonPath, getPackagePath, resolve } from "./path";
|
||||
import { run } from "./run";
|
||||
import { loadJson, saveJson, sha256, sortRecords } from "./utils";
|
||||
|
||||
export type Package = {
|
||||
dependencies: { [ name: string ]: string };
|
||||
devDependencies: { [ name: string ]: string };
|
||||
name: string;
|
||||
version: string;
|
||||
tarballHash: string;
|
||||
location: "remote" | "local";
|
||||
_ethers_nobuild: boolean;
|
||||
};
|
||||
|
||||
export function getPackage(name: string): Package {
|
||||
const value = loadJson(getPackageJsonPath(name));
|
||||
return {
|
||||
name: value.name,
|
||||
version: value.version,
|
||||
dependencies: (value.dependencies || { }),
|
||||
devDependencies: (value.devDependencies || { }),
|
||||
location: "local",
|
||||
tarballHash: (value.tarballHash || null),
|
||||
_ethers_nobuild: !!value._ethers_nobuild,
|
||||
};
|
||||
}
|
||||
|
||||
export function updateJson(path: string, replace: Record<string, any>, sort?: boolean): void {
|
||||
const values = loadJson(path);
|
||||
|
||||
Object.keys(replace).forEach((key) => {
|
||||
const value = replace[key];
|
||||
if (value === undefined) {
|
||||
delete values[key];
|
||||
} else {
|
||||
values[key] = replace[key];
|
||||
}
|
||||
});
|
||||
|
||||
saveJson(path, values, !!sort);
|
||||
}
|
||||
|
||||
export function getDependencies(name?: string, filter?: (name: string) => boolean): Record<string, string> {
|
||||
if (name) {
|
||||
return sortRecords(getPackage(name).dependencies);
|
||||
}
|
||||
|
||||
// Find all versions for each package dependency
|
||||
const deps = dirnames.reduce((accum, dirname) => {
|
||||
const deps = getPackage(dirname).dependencies;
|
||||
Object.keys(deps).forEach((name) => {
|
||||
if (filter && !filter(name)) { return; }
|
||||
if (!accum[name]) { accum[name] = { }; }
|
||||
accum[name][deps[name]] = true;
|
||||
});
|
||||
return accum;
|
||||
}, <Record<string, Record<string, boolean>>>{});
|
||||
|
||||
// Make sure each package dependency only has 1 version
|
||||
return sortRecords(Object.keys(deps).reduce((accum, name) => {
|
||||
const versions = Object.keys(deps[name]);
|
||||
if (versions.length > 1) {
|
||||
throw new Error(`cannot depend on multiple versions for ${ JSON.stringify(name) }: ${ versions.map(v => JSON.stringify(v)).join(", ") }`);
|
||||
}
|
||||
accum[name] = versions[0];
|
||||
return accum;
|
||||
}, <Record<string, string>>{ }));
|
||||
}
|
||||
|
||||
export function getPackList(name: string): Array<string> {
|
||||
const result = run("npm", [ "pack", "--json", getPackagePath(name), "--dry-run" ]);
|
||||
if (!result.ok) {
|
||||
const error = new Error(`failed to run npm pack: ${ name }`);
|
||||
(<any>error).result = result;
|
||||
throw error;
|
||||
}
|
||||
return JSON.parse(result.stdout)[0].files.map((info: { path: string }) => info.path);
|
||||
}
|
||||
|
||||
/*
|
||||
export function getTarball(name: string): Buffer {
|
||||
const files = getPackList(name).map((name) => `./${ name }`);
|
||||
files.sort((a, b) => {
|
||||
|
||||
const compsA = a.split("/"), compsB = b.split("/");
|
||||
while (true) {
|
||||
const a = compsA.shift(), b = compsB.shift();
|
||||
if (a === b) { continue; }
|
||||
|
||||
if (compsA.length === 0 && compsB.length === 0) {
|
||||
if (a < b) { return -1; }
|
||||
if (a > b) { return 1; }
|
||||
break;
|
||||
}
|
||||
|
||||
if (compsA.length === 0) { return -1; }
|
||||
if (compsB.length === 0) { return 1; }
|
||||
|
||||
if (a < b) { return -1; }
|
||||
if (a > b) { return 1; }
|
||||
}
|
||||
|
||||
return 0;
|
||||
});
|
||||
|
||||
return tar.create({
|
||||
sync: true,
|
||||
cwd: getPackagePath(name),
|
||||
prefix: "package/",
|
||||
gzip: true,
|
||||
portable: true,
|
||||
// Provide a specific date in the 1980s for the benefit of zip,
|
||||
// which is confounded by files dated at the Unix epoch 0.
|
||||
mtime: new Date('1985-10-26T08:15:00.000Z'),
|
||||
}, files).read();
|
||||
}
|
||||
*/
|
||||
export function computeTarballHash(name: string): string {
|
||||
|
||||
// Sort the files to get a consistent hash
|
||||
const files = getPackList(name);
|
||||
files.sort();
|
||||
|
||||
// Compute the hash for each file
|
||||
const packageRoot = getPackagePath(name);
|
||||
const hashes = files.reduce((accum, filename) => {
|
||||
let content = fs.readFileSync(resolve(packageRoot, filename));
|
||||
|
||||
// The package.json includes the hash, so we need to nix it to get a consistent hash
|
||||
if (filename === "package.json") {
|
||||
const info = JSON.parse(content.toString());
|
||||
delete info.tarballHash;
|
||||
content = Buffer.from(JSON.stringify(info, null, 2));
|
||||
}
|
||||
|
||||
accum[filename] = sha256(content);
|
||||
return accum;
|
||||
}, <Record<string, string>>{ });
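// (Descriptive note, not part of the original commit: the digest below is
// taken over a JSON-style string of the form {"file":"0x…","file2":"0x…"},
// so changing any packed file or the file list changes the tarball hash.)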
|
||||
|
||||
return sha256(Buffer.from("{" + files.map((filename) => {
|
||||
return `${ JSON.stringify(filename) }:"${ hashes[filename] }"`
|
||||
}).join(",") + "}"));
|
||||
}
|
|
@ -0,0 +1,65 @@
|
|||
|
||||
// See: https://stackoverflow.com/questions/9781218/how-to-change-node-jss-console-font-color
|
||||
let disableColor = !(process.stdout.isTTY);
|
||||
|
||||
export function getProgressBar(action: string): (percent: number) => void {
|
||||
let lastProgress = -1;
|
||||
|
||||
return function(percent: number): void {
|
||||
const progress = Math.trunc(percent * 100);
|
||||
|
||||
if (disableColor) {
|
||||
if (lastProgress === -1) {
|
||||
console.log(action + "...");
|
||||
}
|
||||
lastProgress = progress;
|
||||
return;
|
||||
}
|
||||
|
||||
//process.stdin.setRawMode(false);
|
||||
//process.stdin.pause();
|
||||
|
||||
if (progress === lastProgress || lastProgress === 1) { return; }
|
||||
lastProgress = progress;
|
||||
|
||||
(<any>(process.stdout)).clearLine();
|
||||
(<any>(process.stdout)).cursorTo(0);
|
||||
process.stdout.write(action + "... " + progress + "%");
|
||||
|
||||
if (percent === 1) {
|
||||
process.stdout.write('\n');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const colorSequences: Record<string, string> = {
|
||||
blue: "\x1b[34m",
|
||||
cyan: "\x1b[36m",
|
||||
green: "\x1b[32m",
|
||||
magenta: "\x1b[35m",
|
||||
red: "\x1b[31m",
|
||||
yellow: "\x1b[33m",
|
||||
bold: ""
|
||||
};
|
||||
|
||||
function getColor(color?: string): string {
|
||||
if (!color || color === "normal") { return "\x1b[0m"; }
|
||||
return "\x1b[1m" + colorSequences[color];
|
||||
}
|
||||
|
||||
export type ColorifyFunc = (text: string) => string;
|
||||
|
||||
function _colorify(format: string): ColorifyFunc {
|
||||
return function (text: string): string {
|
||||
if (disableColor) { return text; }
|
||||
return getColor(format) + text.replace(/[^ -~]+/g, "") + getColor();
|
||||
}
|
||||
}
|
||||
|
||||
export const colorify: { [ format: string ]: ColorifyFunc } = Object.freeze({
|
||||
bold: _colorify("bold"),
|
||||
|
||||
blue: _colorify("blue"),
|
||||
green: _colorify("green"),
|
||||
red: _colorify("red"),
|
||||
});
|
|
@ -0,0 +1,47 @@
|
|||
|
||||
import semver from "semver";
|
||||
|
||||
import { getUrl } from "./geturl";
|
||||
import { Package, getPackage as _getPackage } from "./local";
|
||||
|
||||
|
||||
const cache: Record<string, any> = { };
|
||||
|
||||
async function getPackageInfo(name: string): Promise<any> {
|
||||
// Convert dirname to package if needed
|
||||
name = _getPackage(name).name;
|
||||
|
||||
if (!cache[name]) {
|
||||
try {
|
||||
const result = await getUrl("http:/" + "/registry.npmjs.org/" + name);
|
||||
cache[name] = JSON.parse(Buffer.from(result.body).toString("utf8"));
|
||||
} catch (error) {
|
||||
if (error.status === 404) { return null; }
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
return cache[name] || null;
|
||||
}
|
||||
|
||||
export async function getPackage(name: string, version?: string): Promise<Package> {
|
||||
const infos = await getPackageInfo(name);
|
||||
if (infos == null) { return null; }
|
||||
|
||||
if (version == null) {
|
||||
const versions = Object.keys(infos.versions);
|
||||
versions.sort(semver.compare);
|
||||
version = versions.pop();
|
||||
}
|
||||
|
||||
const info = infos.versions[version];
|
||||
|
||||
return {
|
||||
dependencies: (info.dependencies || {}),
|
||||
devDependencies: (info.devDependencies || {}),
|
||||
location: "remote",
|
||||
name: info.name,
|
||||
tarballHash: info.tarballHash,
|
||||
version : info.version,
|
||||
_ethers_nobuild: !!info._ethers_nobuild,
|
||||
};
|
||||
}
|
|
@ -1,10 +1,15 @@
|
|||
import fs from "fs";
|
||||
import { resolve } from "path";
|
||||
import { resolve as _resolve } from "path";
|
||||
|
||||
export const root: string = resolve(__dirname, "../../../");
|
||||
export const root: string = _resolve(__dirname, "../../../");
|
||||
|
||||
const pathRootPackageJsonPath: string = resolve(root, "package.json");
|
||||
const pathPackages: string = resolve(root, "packages");
|
||||
export function resolve(...args: Array<string>): string {
|
||||
args.unshift(root);
|
||||
return _resolve.apply(null, args);
|
||||
}
|
||||
|
||||
const pathRootPackageJsonPath: string = resolve("package.json");
|
||||
const pathPackages: string = resolve("packages");
|
||||
|
||||
export const dirs = Object.freeze({
|
||||
rootPackageJsonPath: pathRootPackageJsonPath,
|
||||
|
@ -12,6 +17,7 @@ export const dirs = Object.freeze({
|
|||
root,
|
||||
});
|
||||
|
||||
|
||||
type PackageInfo = {
|
||||
dirname: string;
|
||||
packageName: string;
|
||||
|
@ -20,18 +26,11 @@ type PackageInfo = {
|
|||
version: string;
|
||||
};
|
||||
|
||||
export type Package = {
|
||||
dependencies: { [ name: string ]: string };
|
||||
devDependencies: { [ name: string ]: string };
|
||||
name: string;
|
||||
version: string;
|
||||
};
|
||||
|
||||
export const dirnames: ReadonlyArray<string> = Object.freeze(fs.readdirSync(dirs.packages));
|
||||
|
||||
const packageLookup = dirnames.reduce((accum, dirname) => {
|
||||
const packagePath = resolve(dirs.packages, dirname);
|
||||
const packageJsonPath = resolve(packagePath, "package.json");
|
||||
const packagePath = _resolve(dirs.packages, dirname);
|
||||
const packageJsonPath = _resolve(packagePath, "package.json");
|
||||
|
||||
const info = JSON.parse(fs.readFileSync(packageJsonPath).toString());
|
||||
const packageName = info.name;
|
||||
|
@ -46,16 +45,6 @@ const packageLookup = dirnames.reduce((accum, dirname) => {
|
|||
|
||||
export const packages: ReadonlyArray<string> = Object.freeze(dirnames.map((dirname) => packageLookup[dirname].packageName));
|
||||
|
||||
export function atomicWrite(path: string, value: string | Uint8Array): void {
|
||||
const tmp = resolve(dirs.root, ".atomic-tmp");
|
||||
fs.writeFileSync(tmp, value);
|
||||
fs.renameSync(tmp, path);
|
||||
}
|
||||
|
||||
export function loadJson(path: string): any {
|
||||
return JSON.parse(fs.readFileSync(path).toString());
|
||||
}
|
||||
|
||||
function getPackageInfo(name: string): PackageInfo {
|
||||
const value = packageLookup[name];
|
||||
if (!value) { throw new Error(`unknown package: ${ name }`); }
|
||||
|
@ -74,76 +63,6 @@ export function getPackageJsonPath(name: string): string {
|
|||
return getPackageInfo(name).packageJsonPath;
|
||||
}
|
||||
|
||||
export function getPackage(name: string): Package {
|
||||
const value = loadJson(getPackageJsonPath(name));
|
||||
return {
|
||||
name: value.name,
|
||||
version: value.version,
|
||||
dependencies: (value.dependencies || { }),
|
||||
devDependencies: (value.dependencies || { }),
|
||||
};
|
||||
}
|
||||
|
||||
function sortRecords(record: Record<string, any>): Record<string, any> {
|
||||
const keys = Object.keys(record);
|
||||
keys.sort();
|
||||
|
||||
return keys.reduce((accum, name) => {
|
||||
accum[name] = record[name];
|
||||
return accum;
|
||||
}, <Record<string, any>>{ });
|
||||
}
|
||||
|
||||
export function getDependencies(name?: string): Record<string, string> {
|
||||
if (name) {
|
||||
return sortRecords(getPackage(name).dependencies);
|
||||
}
|
||||
|
||||
// Find all versions for each package dependency
|
||||
const deps = dirnames.reduce((accum, dirname) => {
|
||||
const deps = getPackage(dirname).dependencies;
|
||||
Object.keys(deps).forEach((name) => {
|
||||
if (!accum[name]) { accum[name] = { }; }
|
||||
accum[name][deps[name]] = true;
|
||||
});
|
||||
return accum;
|
||||
}, <Record<string, Record<string, boolean>>>{});
|
||||
|
||||
// Make sure each package dependency only has 1 version
|
||||
return sortRecords(Object.keys(deps).reduce((accum, name) => {
|
||||
const versions = Object.keys(deps[name]);
|
||||
if (versions.length > 1) {
|
||||
throw new Error(`cannot depend on multiple versions for ${ JSON.stringify(name) }: ${ versions.map(v => JSON.stringify(v)).join(", ") }`);
|
||||
}
|
||||
accum[name] = versions[0];
|
||||
return accum;
|
||||
}, <Record<string, string>>{ }));
|
||||
}
|
||||
|
||||
export function isEthers(name: string) {
|
||||
return !!packageLookup[name];
|
||||
}
|
||||
|
||||
export function updateJson(path: string, replace: Record<string, any>, sort?: boolean): void {
|
||||
const values = loadJson(path);
|
||||
Object.keys(replace).forEach((key) => {
|
||||
values[key] = replace[key];
|
||||
});
|
||||
|
||||
let replacer: (key: string, value: any) => any = null;
|
||||
if (sort) {
|
||||
replacer = (key, value) => {
|
||||
if (typeof(value) === "object") {
|
||||
const keys = Object.keys(value);
|
||||
keys.sort();
|
||||
return keys.reduce((accum, key) => {
|
||||
accum[key] = value[key];
|
||||
return accum;
|
||||
}, <Record<string, any>>{});
|
||||
}
|
||||
return value;
|
||||
};
|
||||
}
|
||||
|
||||
atomicWrite(path, JSON.stringify(values, replacer, 2));
|
||||
}
|
||||
|
|
|
@ -0,0 +1,74 @@
|
|||
import { spawnSync } from "child_process";
|
||||
|
||||
export type RunResult = {
|
||||
stderr: string | null;
|
||||
_stderr: string | Buffer;
|
||||
|
||||
stdout: string;
|
||||
_stdout: string | Buffer;
|
||||
|
||||
status: number;
|
||||
ok: boolean;
|
||||
};
|
||||
|
||||
export function run(progname: string, args?: Array<string>, currentWorkingDirectory?: string): RunResult {
|
||||
if (args == null) { args = [ ]; }
|
||||
|
||||
const options: any = { };
|
||||
if (currentWorkingDirectory) { options.cwd = currentWorkingDirectory; }
|
||||
const child = spawnSync(progname, args, options);
|
||||
|
||||
const result = {
|
||||
_stderr: child.stderr,
|
||||
stderr: (child.stderr.toString() || null),
|
||||
_stdout: child.stdout,
|
||||
stdout: child.stdout.toString(),
|
||||
status: child.status,
|
||||
ok: (child.stderr.length === 0 && child.status === 0)
|
||||
};
|
||||
|
||||
if (child.error) {
|
||||
(<any>(child.error)).result = result;
|
||||
throw child.error;
|
||||
}
|
||||
|
||||
return result;
|
||||
|
||||
/*
|
||||
const result: RunResult = {
|
||||
stderr: null,
|
||||
_stderr: Buffer.from([]),
|
||||
stdout: null,
|
||||
_stdout: Buffer.from([]),
|
||||
status: null,
|
||||
ok: false,
|
||||
};
|
||||
|
||||
proc.stderr.on("data", (data) => {
|
||||
result._stderr = Buffer.concat([ result._stderr, data ]);
|
||||
});
|
||||
|
||||
proc.stdout.on("data", (data) => {
|
||||
result._stdout = Buffer.concat([ result._stdout, data ]);
|
||||
});
|
||||
|
||||
proc.on("error", (error) => {
|
||||
result.stderr = result._stderr.toString("utf8");
|
||||
result.stdout = result._stdout.toString("utf8");
|
||||
(<any>error).result = result;
|
||||
|
||||
console.log("Error:", error);
|
||||
|
||||
reject(error);
|
||||
});
|
||||
|
||||
proc.on("close", (code) => {
|
||||
result.stderr = result._stderr.toString("utf8");
|
||||
result.stdout = result._stdout.toString("utf8");
|
||||
result.status = code;
|
||||
result.ok = (result._stderr.length === 0 && code === 0);
|
||||
resolve(result);
|
||||
});
|
||||
});
|
||||
*/
|
||||
}
|
|
@ -0,0 +1,59 @@
|
|||
import fs from "fs";
|
||||
import { resolve } from "path";
|
||||
|
||||
import { createHash } from "crypto";
|
||||
|
||||
export function repeat(char: string, length: number): string {
|
||||
if (char.length === 0) { return ""; }
|
||||
let output = char;
|
||||
while (output.length < length) { output = output + output; }
|
||||
return output.substring(0, length);
|
||||
}
|
||||
|
||||
export function sha256(content: Buffer): string {
|
||||
const hasher = createHash("sha256");
|
||||
hasher.update(content);
|
||||
return "0x" + hasher.digest("hex");
|
||||
}
|
||||
|
||||
export function sortRecords(record: Record<string, any>): Record<string, any> {
|
||||
const keys = Object.keys(record);
|
||||
keys.sort();
|
||||
|
||||
return keys.reduce((accum, name) => {
|
||||
accum[name] = record[name];
|
||||
return accum;
|
||||
}, <Record<string, any>>{ });
|
||||
}
|
||||
|
||||
export function atomicWrite(path: string, value: string | Uint8Array): void {
|
||||
const tmp = resolve(__dirname, "../../../.atomic-tmp");
|
||||
fs.writeFileSync(tmp, value);
|
||||
fs.renameSync(tmp, path);
|
||||
}
|
||||
|
||||
export function loadJson(path: string): any {
|
||||
return JSON.parse(fs.readFileSync(path).toString());
|
||||
}
|
||||
|
||||
export function saveJson(filename: string, data: any, sort?: boolean): any {
|
||||
|
||||
let replacer: (key: string, value: any) => any = undefined;
|
||||
if (sort) {
|
||||
replacer = (key, value) => {
|
||||
if (Array.isArray(value)) {
|
||||
// pass
|
||||
} else if (value && typeof(value) === "object") {
|
||||
const keys = Object.keys(value);
|
||||
keys.sort();
|
||||
return keys.reduce((accum, key) => {
|
||||
accum[key] = value[key];
|
||||
return accum;
|
||||
}, <Record<string, any>>{});
|
||||
}
|
||||
return value;
|
||||
};
|
||||
}
|
||||
|
||||
atomicWrite(filename, JSON.stringify(data, replacer, 2) + "\n");
|
||||
}
|
|
@ -0,0 +1,17 @@
|
|||
|
||||
declare module "tar" {
|
||||
export type CreateOptions = {
|
||||
sync?: boolean,
|
||||
cwd?: string,
|
||||
prefix?: string,
|
||||
gzip?: boolean,
|
||||
portable?: boolean,
|
||||
mtime?: Date
|
||||
};
|
||||
|
||||
export interface Readable {
|
||||
read(): Buffer;
|
||||
}
|
||||
|
||||
export function create(options: CreateOptions, files: Array<string>): Readable;
|
||||
}
|
|
@ -23,6 +23,7 @@
|
|||
"noUnusedLocals": true
|
||||
},
|
||||
"include": [
|
||||
"./thirdparty.d.ts",
|
||||
"./src.ts/*.ts",
|
||||
"./src.ts/cmds/*.ts"
|
||||
],
|
||||
|
|