mirror of https://github.com/embarklabs/embark.git
refactor(@embark/coverage): move coverage module into own package
This commit also moves several utility methods into @embark/utils as needed.
parent 8ca6419a4e
commit 57c3502f1f
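The net effect of the refactor, sketched below with an illustrative module (the contract path and variable names are examples, not part of this commit): code that previously reached into `lib/core/file` and `lib/utils/utils` through relative requires now imports the same helpers from the `embark-utils` package.

```ts
// Before: deep relative requires into embark's core
// const File = require("../../core/file");
// const utils = require("../../utils/utils");

// After: the helpers live in the embark-utils package
import { File, Types, dappPath } from "embark-utils";

const file = new File({ path: dappPath("contracts", "SimpleStorage.sol"), type: Types.dappFile });
```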
@@ -0,0 +1,4 @@
+engine-strict = true
+package-lock = false
+save-exact = true
+scripts-prepend-node-path = true
@@ -0,0 +1,6 @@
+# `embark-coverage`
+
+> Code Coverage capabilities for Embark
+
+Visit [embark.status.im](https://embark.status.im/) to get started with
+[Embark](https://github.com/embark-framework/embark).
@@ -0,0 +1,69 @@
+{
+  "name": "embark-coverage",
+  "version": "4.1.0-beta.1",
+  "author": "Iuri Matias <iuri.matias@gmail.com>",
+  "contributors": [],
+  "description": "Code Coverage capabilities for Embark",
+  "homepage": "https://github.com/embark-framework/embark/tree/master/packages/embark-coverage#readme",
+  "bugs": "https://github.com/embark-framework/embark/issues",
+  "keywords": [
+    "blockchain",
+    "dapps",
+    "ethereum",
+    "ipfs",
+    "serverless",
+    "solc",
+    "solidity"
+  ],
+  "files": [
+    "dist"
+  ],
+  "license": "MIT",
+  "repository": {
+    "directory": "packages/embark-coverage",
+    "type": "git",
+    "url": "https://github.com/embark-framework/embark.git"
+  },
+  "main": "./dist/index.js",
+  "scripts": {
+    "build": "cross-env BABEL_ENV=node babel src --extensions \".ts\" --out-dir dist --root-mode upward --source-maps",
+    "ci": "npm run qa",
+    "clean": "npm run reset",
+    "lint": "npm-run-all lint:*",
+    "lint:ts": "tslint -c tslint.json \"src/**/*.ts\"",
+    "package": "npm pack",
+    "qa": "npm-run-all lint typecheck build package",
+    "reset": "npx rimraf dist embark-*.tgz package",
+    "start": "npm run watch",
+    "typecheck": "tsc",
+    "watch": "run-p watch:*",
+    "watch:build": "npm run build -- --verbose --watch",
+    "watch:typecheck": "npm run typecheck -- --preserveWatchOutput --watch"
+  },
+  "dependencies": {
+    "@babel/runtime-corejs2": "7.3.1",
+    "embark-core": "^4.1.0-beta.0",
+    "embark-utils": "^4.1.0-beta.0",
+    "fs-extra": "7.0.1",
+    "globule": "1.2.1",
+    "semver": "5.6.0",
+    "solidity-parser-antlr": "0.4.2",
+    "web3-eth-contract": "1.0.0-beta.37"
+  },
+  "devDependencies": {
+    "@babel/cli": "7.2.3",
+    "@babel/core": "7.2.2",
+    "@types/web3": "1.0.12",
+    "cross-env": "5.2.0",
+    "eslint": "5.7.0",
+    "npm-run-all": "4.1.5",
+    "rimraf": "2.6.3",
+    "tslint": "5.16.0",
+    "typescript": "3.4.5"
+  },
+  "engines": {
+    "node": ">=8.12.0",
+    "npm": ">=6.4.1",
+    "yarn": ">=1.12.3"
+  }
+}
@@ -1,3 +1,5 @@
+import { File } from "embark-utils";
+import * as fs from "fs-extra";
 import * as path from "path";
 import parser, { LineColumn, Location } from "solidity-parser-antlr";
 import { EventLog } from "web3/types";
@@ -9,9 +11,6 @@ import { InstrumentWalker } from "./instrumentWalker";
 import { coverageContractsPath } from "./path";
 import { BranchType, Coverage } from "./types";
 
-const File = require("../../core/file");
-const fs = require("../../core/fs");
-
 const STATEMENT_EVENT = "__StatementCoverage";
 const POINT_FACTOR = 1000000000;
 
@@ -1,11 +1,9 @@
-import { dappPath } from "embark-utils";
+import { dappPath, File, removePureView } from "embark-utils";
 import * as globule from "globule";
 import * as path from "path";
 import Web3Contract from "web3/eth/contract";
 
 import { Contract, Embark } from "embark";
-import { File } from "../../core/file";
-import { removePureView } from "../../utils/solidity/code";
 import { ContractEnhanced } from "./contractEnhanced";
 import { coverageContractsPath } from "./path";
 import { Coverage as ICoverage } from "./types";
@@ -1,5 +1,3 @@
 import * as path from "path";
 
-const fs = require("../../core/fs");
-
 export const coverageContractsPath = () => path.join("coverage", "instrumentedContracts");
@@ -0,0 +1,4 @@
+{
+  "extends": "../../tsconfig.json",
+  "include": ["src/**/*"]
+}
@@ -0,0 +1,3 @@
+{
+  "extends": "../../tslint.json"
+}
@@ -1 +1,21 @@
-declare module "embark-utils";
+declare module "embark-utils" {
+  class File {
+    path: string;
+    constructor(options: any);
+    prepareForCompilation(isCoverage?: boolean): any;
+  }
+
+  function compact(array: any): any;
+  function checkIsAvailable(url: string, callback: any): void;
+  function dockerHostSwap(host: string): string;
+  function dappPath(...names: string[]): string;
+  function escapeHtml(message: any): string;
+  function embarkPath(...names: string[]): string;
+  function exit(code?: any): void;
+  function findNextPort(port: number): Promise<number>;
+  function jsonFunctionReplacer(key: any, value: any): any;
+  function fuzzySearch(text: string, list: any, filter: any): any;
+  function getExternalContractUrl(file: string, provideUrl: string): string;
+  function recursiveMerge(target: any, source: any): any;
+  function removePureView(dir: string): void;
+}
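With the expanded ambient declaration above, TypeScript consumers such as embark-coverage can use the relocated helpers under type checking. A minimal sketch, assuming a `.sol` path relative to the dapp (the file name is an example; the declaration types `prepareForCompilation` loosely as `any`):

```ts
import { dappPath, File } from "embark-utils";

async function flatten(relativeSolPath: string) {
  // "dapp_file" is the string value behind Types.dappFile in the implementation
  const file = new File({ path: dappPath(relativeSolPath), type: "dapp_file" });
  return file.prepareForCompilation();
}
```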
@@ -52,6 +52,7 @@
     "embark-i18n": "^4.1.0-beta.1",
     "ethereumjs-wallet": "0.6.3",
     "follow-redirects": "1.5.7",
+    "fs-extra": "7.0.1",
     "fuzzy": "0.1.3",
     "merge": "1.2.1",
     "multihashes": "0.4.14",
@@ -11,3 +11,10 @@ export function recursiveMerge(target: any, source: any) {
 export function compact(array: any) {
   return array.filter((n: any) => n);
 }
+
+export function groupBy(array: any, key: any) {
+  return array.reduce((rv: any, x: any) => {
+    (rv[x[key]] = rv[x[key]] || []).push(x);
+    return rv;
+  }, {});
+}
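A quick illustration of the new `groupBy` helper (the records here are invented for the example); it buckets array items by the value of the given key:

```ts
import { groupBy } from "./collections";

const files = [
  { dir: "contracts", name: "token.sol" },
  { dir: "test", name: "token_spec.js" },
  { dir: "contracts", name: "sale.sol" },
];

// => { contracts: [token.sol entry, sale.sol entry], test: [token_spec.js entry] }
const byDir = groupBy(files, "dir");
```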
@@ -0,0 +1,177 @@
+import { __ } from "embark-i18n";
+import * as fs from "fs-extra";
+import * as path from "path";
+import { downloadFile } from "./network";
+import { dappPath, embarkPath } from "./pathUtils";
+import { ImportRemapping, prepareForCompilation } from "./solidity/remapImports";
+
+const HTTP_CONTRACTS_DIRECTORY = ".embark/contracts/";
+
+export enum Types {
+  embarkInternal = "embark_internal",
+  dappFile = "dapp_file",
+  custom = "custom",
+  http = "http",
+}
+
+export class File {
+  public type: Types;
+  public externalUrl: string = "";
+  public path = "";
+  public basedir: string;
+  public resolver: (callback: (content: string) => void) => void;
+  public pluginPath: string;
+  public storageConfig: any;
+  public providerUrl: string;
+  public importRemappings: ImportRemapping[] = [];
+  public originalPath: string;
+
+  constructor(options: any) {
+    this.type = options.type;
+
+    this.basedir = options.basedir || "";
+    this.resolver = options.resolver;
+    this.pluginPath = options.pluginPath ? options.pluginPath : "";
+    this.storageConfig = options.storageConfig;
+    this.providerUrl = "";
+    this.originalPath = options.originalPath || "";
+
+    if (this.type === Types.custom && this.pluginPath) {
+      this.path = path.join(this.pluginPath, options.path).replace(dappPath(), "");
+      if (this.path.startsWith("/")) {
+        this.path = this.path.substring(1);
+      }
+    } else if (this.type === Types.http) {
+      const external = getExternalContractUrl(options.externalUrl, this.providerUrl);
+      if (external !== null) {
+        this.externalUrl = external.url;
+        this.path = path.normalize(dappPath(external.filePath));
+      }
+    } else {
+      this.path = path.normalize(options.path);
+    }
+  }
+
+  public async prepareForCompilation(isCoverage = false) {
+    if (!this.path.endsWith(".sol")) {
+      return Promise.reject(__("This method is only supported for Solidity files"));
+    }
+    return prepareForCompilation(this, isCoverage);
+  }
+
+  public get content(): Promise<string> {
+    return new Promise<string>((resolve) => {
+      switch (this.type) {
+        case Types.embarkInternal: {
+          const content = fs.readFileSync(embarkPath(path.join("dist", this.path)), "utf-8");
+          return resolve(content);
+        }
+
+        case Types.dappFile: {
+          const content = fs.readFileSync(this.path, "utf-8").toString();
+          return resolve(content);
+        }
+
+        case Types.custom: {
+          return this.resolver((content: string) => {
+            resolve(content);
+          });
+        }
+
+        case Types.http: {
+          fs.ensureFileSync(this.path);
+          return downloadFile(this.externalUrl, this.path, () => {
+            const content = fs.readFileSync(this.path, "utf-8");
+            resolve(content);
+          });
+        }
+      }
+    });
+  }
+
+}
+
+export function getExternalContractUrl(file: string, providerUrl: string) {
+  let url;
+  const RAW_URL = "https://raw.githubusercontent.com/";
+  const DEFAULT_SWARM_GATEWAY = "https://swarm-gateways.net/";
+  const MALFORMED_SWARM_ERROR = "Malformed Swarm gateway URL for ";
+  const MALFORMED_ERROR = "Malformed Github URL for ";
+  const MALFORMED_IPFS_ERROR = "Malformed IPFS URL for ";
+  const IPFS_GETURL_NOTAVAILABLE = "IPFS getUrl is not available. Please set it in your storage config. For more info: https://embark.status.im/docs/storage_configuration.html";
+  if (file.startsWith("https://github")) {
+    const file_path = file.match(/https:\/\/github\.[a-z]+\/(.*)/);
+    if (!file_path) {
+      console.error(MALFORMED_ERROR + file);
+      return null;
+    }
+    url = `${RAW_URL}${file_path[1].replace("blob/", "")}`;
+  } else if (file.startsWith("ipfs")) {
+    if (!providerUrl) {
+      console.error(IPFS_GETURL_NOTAVAILABLE);
+      return null;
+    }
+    let file_path = file.match(/ipfs:\/\/([-a-zA-Z0-9]+)\/(.*)/);
+    if (!file_path) {
+      file_path = file.match(/ipfs:\/\/([-a-zA-Z0-9]+)/);
+      if (!file_path) {
+        console.error(MALFORMED_IPFS_ERROR + file);
+        return null;
+      }
+    }
+    let matchResult = file_path[1];
+    if (file_path[2]) {
+      matchResult += "/" + file_path[2];
+    }
+    url = `${providerUrl}${matchResult}`;
+    return {
+      filePath: HTTP_CONTRACTS_DIRECTORY + matchResult,
+      url,
+    };
+  } else if (file.startsWith("git")) {
+    // Match values
+    // [0] entire input
+    // [1] git://
+    // [2] user
+    // [3] repository
+    // [4] path
+    // [5] branch
+    const file_path = file.match(
+      /(git:\/\/)?github\.[a-z]+\/([-a-zA-Z0-9@:%_+.~#?&=]+)\/([-a-zA-Z0-9@:%_+.~#?&=]+)\/([-a-zA-Z0-9@:%_+.~?\/&=]+)#?([a-zA-Z0-9\/_.-]*)?/,
+    );
+    if (!file_path) {
+      console.error(MALFORMED_ERROR + file);
+      return null;
+    }
+    let branch = file_path[5];
+    if (!branch) {
+      branch = "master";
+    }
+    url = `${RAW_URL}${file_path[2]}/${file_path[3]}/${branch}/${file_path[4]}`;
+  } else if (file.startsWith("http")) {
+    url = file;
+  } else if (file.startsWith("bzz")) {
+    if (!providerUrl) {
+      url = DEFAULT_SWARM_GATEWAY + file;
+    } else {
+      let file_path = file.match(/bzz:\/([-a-zA-Z0-9]+)\/(.*)/);
+      if (!file_path) {
+        file_path = file.match(/bzz:\/([-a-zA-Z0-9]+)/);
+        if (!file_path) {
+          console.log(MALFORMED_SWARM_ERROR + file);
+          return null;
+        }
+      }
+      url = providerUrl + "/" + file;
+    }
+  } else {
+    return null;
+  }
+  const match = url.match(
+    /\.[a-z]+\/([-a-zA-Z0-9@:%_+.~#?&\/=]+)/,
+  );
+  return {
+    filePath: HTTP_CONTRACTS_DIRECTORY + (match !== null ? match[1] : ""),
+    url,
+  };
+}
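To make the behaviour of the relocated `getExternalContractUrl` concrete, here is a sketch of a call traced through the branches above (the ERC725 path comes from the existing tests further down in this diff; the exact expected objects are asserted there):

```ts
import { getExternalContractUrl } from "embark-utils";

const fileObj = getExternalContractUrl(
  "git://github.com/status-im/contracts/contracts/identity/ERC725.sol",
  "", // providerUrl is only consulted for ipfs:// and bzz:/ sources
);
// Tracing the "git" branch above: there is no "#branch" suffix, so "master" is
// assumed, `url` points at raw.githubusercontent.com, and `filePath` is rooted
// under HTTP_CONTRACTS_DIRECTORY (".embark/contracts/").
```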
@@ -1,12 +1,10 @@
-const path = require('path');
-const os = require('os');
 const http = require('follow-redirects').http;
 const https = require('follow-redirects').https;
 const shelljs = require('shelljs');
 const clipboardy = require('clipboardy');
 
 const {canonicalHost, defaultCorsHost, defaultHost, dockerHostSwap, isDocker} = require('./host');
-const {findNextPort} = require('./network');
+const { findNextPort, downloadFile } = require('./network');
 const logUtils = require('./log-utils');
 const toposortGraph = require('./toposort');
 import { unitRegex } from './constants';
@@ -17,6 +15,7 @@ import {
   hexToNumber,
   decodeParams,
   sha3,
+  sha512,
   isHex,
   soliditySha3,
   toChecksumAddress
@@ -24,10 +23,14 @@ import {
 import { getAddressToContract, getTransactionParams } from './transactionUtils';
 import LongRunningProcessTimer from './longRunningProcessTimer';
 import AccountParser from './accountParser';
+import { dappPath, embarkPath, ipcPath, joinPath, tmpDir, urlJoin } from './pathUtils';
 
 const { extendZeroAddressShorthand, replaceZeroAddressShorthand } = AddressUtils;
 
-import { compact, last, recursiveMerge } from './collections';
+import { compact, last, recursiveMerge, groupBy } from './collections';
+import { prepareForCompilation } from './solidity/remapImports';
+import { removePureView } from './solidity/code';
+import { File, getExternalContractUrl, Types } from './file';
 
 function timer(ms) {
   const then = Date.now();
@@ -60,15 +63,6 @@ function hashTo32ByteHexString(hash) {
   return '0x' + multihash.toHexString(digest);
 }
 
-function sha512(arg) {
-  if (typeof arg !== 'string') {
-    throw new TypeError('argument must be a string');
-  }
-  const crypto = require('crypto');
-  const hash = crypto.createHash('sha512');
-  return hash.update(arg).digest('hex');
-}
-
 function exit(code) {
   process.exit(code);
 }
@@ -239,12 +233,6 @@ function buildUrlFromConfig(configObj) {
   return buildUrl(configObj.protocol, canonicalHost(configObj.host), configObj.port, configObj.type);
 }
 
-function joinPath() {
-  return path.join.apply(path.join, arguments);
-}
-
-function tmpDir(...args) { return joinPath(os.tmpdir(), ...args); }
-
 function errorMessage(e) {
   if (typeof e === 'string') {
     return e;
@@ -254,32 +242,6 @@ function errorMessage(e) {
   return e;
 }
 
-function dappPath(...names) {
-  const DAPP_PATH = process.env.DAPP_PATH || process.cwd();
-  return path.join(DAPP_PATH, ...names);
-}
-
-function ipcPath(basename, usePipePathOnWindows = false) {
-  if (!(basename && typeof basename === 'string')) {
-    throw new TypeError('first argument must be a non-empty string');
-  }
-  if (process.platform === 'win32' && usePipePathOnWindows) {
-    return `\\\\.\\pipe\\${basename}`;
-  }
-  return joinPath(
-    tmpDir(`embark-${sha512(dappPath()).slice(0, 8)}`),
-    basename
-  );
-}
-
-function embarkPath(...names) {
-  const EMBARK_PATH = process.env.EMBARK_PATH;
-  if (!EMBARK_PATH) {
-    throw new Error('environment variable EMBARK_PATH was not set');
-  }
-  return path.join(EMBARK_PATH, ...names);
-}
-
 
 const Utils = {
   buildUrl,
@@ -288,6 +250,7 @@ const Utils = {
   tmpDir,
   ipcPath,
   dappPath,
+  downloadFile,
   embarkPath,
   jsonFunctionReplacer,
   fuzzySearch,
@@ -305,6 +268,7 @@ const Utils = {
   getTransactionParams,
   isDocker,
   checkIsAvailable,
+  File,
   findNextPort,
   fileTreeSort,
   hashTo32ByteHexString,
@@ -316,15 +280,21 @@ const Utils = {
   prepareContractsConfig,
   getWeiBalanceFromString,
   getHexBalanceFromString,
+  getExternalContractUrl,
+  groupBy,
   sha512,
   sha3,
   timer,
+  Types,
   unitRegex,
+  urlJoin,
+  removePureView,
   runCmd,
   escapeHtml: logUtils.escapeHtml,
   normalizeInput: logUtils.normalizeInput,
   LogHandler: require('./logHandler'),
   LongRunningProcessTimer,
+  prepareForCompilation,
   proposeAlternative,
   toChecksumAddress,
   toposort,
@@ -1,4 +1,7 @@
+import * as fs from "fs-extra";
 import * as net from "net";
+const http = require("follow-redirects").http;
+const https = require("follow-redirects").https;
 
 export function findNextPort(port: number) {
   const server = net.createServer();
@@ -8,3 +11,21 @@ export function findNextPort(port: number) {
     server.listen(port, () => server.close());
   });
 }
+
+export function downloadFile(url: string, dest: string, cb: any) {
+  const file = fs.createWriteStream(dest);
+  (url.substring(0, 5) === "https" ? https : http).get(url, (response: any) => {
+    if (response.statusCode !== 200) {
+      cb(`Download failed, response code ${response.statusCode}`);
+      return;
+    }
+    response.pipe(file);
+    file.on("finish", () => {
+      file.close();
+      cb();
+    });
+  }).on("error", (err: Error) => {
+    fs.unlink(dest);
+    cb(err.message);
+  });
+}
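Because `downloadFile` keeps its Node-style callback signature, callers can wrap it with `promisify`, exactly as the template generator change later in this commit does. A minimal sketch (the URL and destination here are placeholders):

```ts
import { promisify } from "util";
import { downloadFile } from "./network";

async function fetchArchive(url: string, dest: string) {
  // cb(error?) style — promisify turns the error argument into a rejection
  await promisify(downloadFile)(url, dest);
}
```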
@@ -0,0 +1,50 @@
+import * as path from 'path';
+import * as os from 'os';
+import { sha512 } from './web3Utils';
+
+export function joinPath() {
+  return path.join.apply(path.join, arguments);
+}
+
+export function tmpDir(...args) { return joinPath(os.tmpdir(), ...args); }
+
+export function dappPath(...names) {
+  return path.join(process.env.DAPP_PATH || process.cwd(), ...names);
+}
+
+export function ipcPath(basename, usePipePathOnWindows = false) {
+  if (!(basename && typeof basename === 'string')) {
+    throw new TypeError('first argument must be a non-empty string');
+  }
+  if (process.platform === 'win32' && usePipePathOnWindows) {
+    return `\\\\.\\pipe\\${basename}`;
+  }
+  return joinPath(
+    tmpDir(`embark-${sha512(dappPath()).slice(0, 8)}`),
+    basename
+  );
+}
+
+export function embarkPath(...names) {
+  const EMBARK_PATH = process.env.EMBARK_PATH;
+  if (!EMBARK_PATH) {
+    throw new Error('environment variable EMBARK_PATH was not set');
+  }
+  return path.join(EMBARK_PATH, ...names);
+}
+
+export function urlJoin(url, path) {
+  let urlChunks = url.split('/');
+  let levels = path.split('../');
+
+  // remove relative path parts from end of url
+  urlChunks = urlChunks.slice(0, urlChunks.length - levels.length);
+
+  // remove relative path parts from start of match
+  levels.splice(0, levels.length - 1);
+
+  // add on our match so we can join later
+  urlChunks = urlChunks.concat(levels.join().replace('./', ''));
+
+  return urlChunks.join('/');
+}
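The relocated path helpers behave as before; a short sketch of how they resolve (the environment variables and file names here are examples only):

```ts
import { dappPath, embarkPath, ipcPath, urlJoin } from "./pathUtils";

// Joins onto process.env.DAPP_PATH, falling back to process.cwd()
const contract = dappPath("contracts", "simple_storage.sol");

// Throws unless EMBARK_PATH is set in the environment
const bundled = embarkPath("dist", "index.js");

// A per-dapp IPC endpoint under the OS temp directory (named pipe on Windows)
const socket = ipcPath("embark.ipc");

// Resolves "../" segments of the second argument against the first
const sibling = urlJoin("https://example.org/contracts/token/Token.sol", "../zeppelin/ERC20.sol");
// => "https://example.org/contracts/zeppelin/ERC20.sol"
```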
@@ -1,8 +1,7 @@
+import * as fs from "fs-extra";
 import * as globule from "globule";
 import * as path from "path";
 
-const fs = require("../../core/fs");
-
 export const removePureView = (dir: string) => {
   globule.find(path.join(dir, "**/*.sol")).forEach((filepath) => {
     let source = fs.readFileSync(filepath, "utf-8");
@@ -1,11 +1,10 @@
-import { dappPath, embarkPath } from "embark-utils";
+import * as fs from "fs-extra";
 import * as path from "path";
-import { File, Types } from "../../core/file";
+import { groupBy } from "../collections";
+import { File, Types } from "../file";
+import { dappPath, embarkPath, urlJoin } from "../pathUtils";
 import { removePureView, replacePureView } from "./code";
 
-const { urlJoin, groupBy } = require("../../utils/utils");
-const fs = require("../../core/fs");
-
 
 const FIND_IMPORTS_REGEX = /^import[\s]*(['"])(.*)\1;/gm;
 const FIND_FILE_REGEX = /import[\s]*(['"])(.*)\1;/;
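Downstream code now reaches import remapping through the package surface rather than through `lib/utils/solidity/remapImports`; the updated tests near the end of this diff do exactly this. A condensed sketch (the contract path is one of the test fixtures):

```ts
import { File, Types, prepareForCompilation } from "embark-utils";

async function remap() {
  const file = new File({ path: "contracts/recursive_test_0.sol", type: Types.dappFile });
  // Returns the contract source with its imports rewritten to remapped paths
  return prepareForCompilation(file);
}
```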
@@ -44,6 +44,15 @@ export function sha3(arg: any) {
   return web3.utils.sha3(arg);
 }
 
+export function sha512(arg: string) {
+  if (typeof arg !== "string") {
+    throw new TypeError("argument must be a string");
+  }
+  const crypto = require("crypto");
+  const hash = crypto.createHash("sha512");
+  return hash.update(arg).digest("hex");
+}
+
 export function isHex(hex: string) {
   return web3.utils.isHex(hex);
 }
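`sha512` now lives next to the other hashing helpers; a minimal check of its contract (any string input works):

```ts
import { sha512 } from "./web3Utils";

const digest = sha512("embark");
// SHA-512 digests are 64 bytes, so the hex string is 128 characters long
console.log(digest.length); // 128
```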
@@ -102,6 +102,7 @@
     "embark-console-listener": "^4.1.0-beta.1",
     "embark-contracts-manager": "^4.1.0-beta.1",
     "embark-core": "^4.1.0-beta.1",
+    "embark-coverage": "^4.1.0-beta.1",
     "embark-debugger": "^4.1.0-beta.1",
     "embark-deploy-tracker": "^4.1.0-beta.1",
     "embark-deployment": "^4.1.0-beta.1",
@@ -11,15 +11,17 @@ import {
   canonicalHost,
   dappPath,
   defaultHost,
+  File,
+  Types,
   recursiveMerge,
   AddressUtils,
   unitRegex,
   getWeiBalanceFromString,
-  prepareContractsConfig
+  prepareContractsConfig,
+  getExternalContractUrl
 } from 'embark-utils';
 const cloneDeep = require('lodash.clonedeep');
 const { replaceZeroAddressShorthand } = AddressUtils;
-import { File, Types } from "./file";
 
 const DEFAULT_CONFIG_PATH = 'config/';
 const PACKAGE = require('../../../package.json');
@@ -404,7 +406,7 @@ Config.prototype.loadExternalContractsFiles = function() {
     let externalContractFile = null;
 
     if (contract.file.startsWith('http') || contract.file.startsWith('git') || contract.file.startsWith('ipfs') || contract.file.startsWith('bzz')) {
-      const fileObj = utils.getExternalContractUrl(contract.file, this.providerUrl);
+      const fileObj = getExternalContractUrl(contract.file, this.providerUrl);
       if (!fileObj) {
         return this.logger.error(__("HTTP contract file not found") + ": " + contract.file);
       }
@@ -320,7 +320,7 @@ class Engine {
   }
 
   codeCoverageService(_options) {
-    this.registerModule('coverage');
+    this.registerModulePackage('embark-coverage');
   }
 
   testRunnerService(options) {
@@ -1,89 +0,0 @@
-import { __ } from "embark-i18n";
-import { dappPath, embarkPath } from "embark-utils";
-import * as path from "path";
-import { ImportRemapping, prepareForCompilation } from "../utils/solidity/remapImports";
-
-const fs = require("./fs.js");
-const utils = require("../utils/utils");
-
-export enum Types {
-  embarkInternal = "embark_internal",
-  dappFile = "dapp_file",
-  custom = "custom",
-  http = "http",
-}
-
-export class File {
-  public type: Types;
-  public externalUrl: string = "";
-  public path: string;
-  public basedir: string;
-  public resolver: (callback: (content: string) => void) => void;
-  public pluginPath: string;
-  public storageConfig: any;
-  public providerUrl: string;
-  public importRemappings: ImportRemapping[] = [];
-  public originalPath: string;
-
-  constructor(options: any) {
-    this.type = options.type;
-
-    this.basedir = options.basedir || "";
-    this.resolver = options.resolver;
-    this.pluginPath = options.pluginPath ? options.pluginPath : "";
-    this.storageConfig = options.storageConfig;
-    this.providerUrl = "";
-    this.originalPath = options.originalPath || "";
-
-    if (this.type === Types.custom && this.pluginPath) {
-      this.path = path.join(this.pluginPath, options.path).replace(dappPath(), "");
-      if (this.path.startsWith("/")) {
-        this.path = this.path.substring(1);
-      }
-    } else if (this.type === Types.http) {
-      const external = utils.getExternalContractUrl(options.externalUrl, this.providerUrl);
-      this.externalUrl = external.url;
-      this.path = path.normalize(dappPath(external.filePath));
-    } else {
-      this.path = path.normalize(options.path);
-    }
-  }
-
-  public async prepareForCompilation(isCoverage = false) {
-    if (!this.path.endsWith(".sol")) {
-      return Promise.reject(__("This method is only supported for Solidity files"));
-    }
-    return prepareForCompilation(this, isCoverage);
-  }
-
-  public get content(): Promise<string> {
-    return new Promise<string>((resolve) => {
-      switch (this.type) {
-        case Types.embarkInternal: {
-          const content = fs.readFileSync(embarkPath(path.join("dist", this.path)), "utf-8");
-          return resolve(content);
-        }
-
-        case Types.dappFile: {
-          const content = fs.readFileSync(this.path, "utf-8").toString();
-          return resolve(content);
-        }
-
-        case Types.custom: {
-          return this.resolver((content: string) => {
-            resolve(content);
-          });
-        }
-
-        case Types.http: {
-          fs.ensureFileSync(this.path);
-          return utils.downloadFile(this.externalUrl, this.path, () => {
-            const content = fs.readFileSync(this.path, "utf-8");
-            resolve(content);
-          });
-        }
-      }
-    });
-  }
-
-}
@@ -3,7 +3,7 @@ const findUp = require('find-up');
 const fs = require('../core/fs.js');
 const hostedGitInfo = require('hosted-git-info');
 const utils = require('./utils.js');
-import { embarkPath, joinPath, runCmd, errorMessage } from 'embark-utils';
+import { embarkPath, downloadFile, joinPath, runCmd, errorMessage } from 'embark-utils';
 const semver = require('semver');
 const {promisify} = require('util');
 const {execSync} = require('child_process');
@@ -34,7 +34,7 @@ class TemplateGenerator {
       console.log(__('Downloading template...').green);
       fs.mkdirpSync(utils.dirname(tmpFilePath));
       try {
-        await promisify(utils.downloadFile)(url, tmpFilePath);
+        await promisify(downloadFile)(url, tmpFilePath);
       } catch (e) {
         console.error(errorMessage(e).red);
         throw e;
@@ -78,24 +78,6 @@ function sed(file, pattern, replace) {
   shelljs.sed('-i', pattern, replace, file);
 }
 
-function downloadFile(url, dest, cb) {
-  const o_fs = require('fs-extra');
-  var file = o_fs.createWriteStream(dest);
-  (url.substring(0, 5) === 'https' ? https : http).get(url, function (response) {
-    if (response.statusCode !== 200) {
-      cb(`Download failed, response code ${response.statusCode}`);
-      return;
-    }
-    response.pipe(file);
-    file.on('finish', function () {
-      file.close(cb);
-    });
-  }).on('error', function (err) {
-    o_fs.unlink(dest);
-    cb(err.message);
-  });
-}
-
 function extractTar(filename, packageDirectory, cb) {
   const o_fs = require('fs-extra');
   const tar = require('tar');
@@ -117,92 +99,6 @@ function extractZip(filename, packageDirectory, opts, cb) {
   });
 }
 
-function getExternalContractUrl(file,providerUrl) {
-  const constants = require('embark-core/constants');
-  let url;
-  const RAW_URL = 'https://raw.githubusercontent.com/';
-  const DEFAULT_SWARM_GATEWAY = 'https://swarm-gateways.net/';
-  const MALFORMED_SWARM_ERROR = 'Malformed Swarm gateway URL for ';
-  const MALFORMED_ERROR = 'Malformed Github URL for ';
-  const MALFORMED_IPFS_ERROR = 'Malformed IPFS URL for ';
-  const IPFS_GETURL_NOTAVAILABLE = 'IPFS getUrl is not available. Please set it in your storage config. For more info: https://embark.status.im/docs/storage_configuration.html';
-  if (file.startsWith('https://github')) {
-    const match = file.match(/https:\/\/github\.[a-z]+\/(.*)/);
-    if (!match) {
-      console.error(MALFORMED_ERROR + file);
-      return null;
-    }
-    url = `${RAW_URL}${match[1].replace('blob/', '')}`;
-  } else if (file.startsWith('ipfs')) {
-    if(!providerUrl) {
-      console.error(IPFS_GETURL_NOTAVAILABLE);
-      return null;
-    }
-    let match = file.match(/ipfs:\/\/([-a-zA-Z0-9]+)\/(.*)/);
-    if(!match) {
-      match = file.match(/ipfs:\/\/([-a-zA-Z0-9]+)/);
-      if(!match) {
-        console.error(MALFORMED_IPFS_ERROR + file);
-        return null;
-      }
-    }
-    let matchResult = match[1];
-    if(match[2]) {
-      matchResult += '/' + match[2];
-    }
-    url = `${providerUrl}${matchResult}`;
-    return {
-      url,
-      filePath: constants.httpContractsDirectory + matchResult
-    };
-  } else if (file.startsWith('git')) {
-    // Match values
-    // [0] entire input
-    // [1] git://
-    // [2] user
-    // [3] repository
-    // [4] path
-    // [5] branch
-    const match = file.match(
-      /(git:\/\/)?github\.[a-z]+\/([-a-zA-Z0-9@:%_+.~#?&=]+)\/([-a-zA-Z0-9@:%_+.~#?&=]+)\/([-a-zA-Z0-9@:%_+.~?\/&=]+)#?([a-zA-Z0-9\/_.-]*)?/
-    );
-    if (!match) {
-      console.error(MALFORMED_ERROR + file);
-      return null;
-    }
-    let branch = match[5];
-    if (!branch) {
-      branch = 'master';
-    }
-    url = `${RAW_URL}${match[2]}/${match[3]}/${branch}/${match[4]}`;
-  } else if (file.startsWith('http')) {
-    url = file;
-  } else if(file.startsWith('bzz')){
-    if(!providerUrl) {
-      url = DEFAULT_SWARM_GATEWAY + file;
-    } else {
-      let match = file.match(/bzz:\/([-a-zA-Z0-9]+)\/(.*)/);
-      if(!match){
-        match = file.match(/bzz:\/([-a-zA-Z0-9]+)/);
-        if(!match){
-          console.log(MALFORMED_SWARM_ERROR + file);
-          return null;
-        }
-      }
-      url = providerUrl + '/' + file;
-    }
-  } else {
-    return null;
-  }
-  const match = url.match(
-    /\.[a-z]+\/([-a-zA-Z0-9@:%_+.~#?&\/=]+)/
-  );
-  return {
-    url,
-    filePath: constants.httpContractsDirectory + match[1]
-  };
-}
-
 function isValidDomain(v) {
   // from: https://github.com/miguelmota/is-valid-domain
   if (typeof v !== 'string') return false;
@@ -224,13 +120,6 @@ function isValidDomain(v) {
   return isValid;
 }
 
-function groupBy(array, key) {
-  return array.reduce(function (rv, x) {
-    (rv[x[key]] = rv[x[key]] || []).push(x);
-    return rv;
-  }, {});
-}
-
 function interceptLogs(consoleContext, logger) {
   let context = {};
   context.console = consoleContext;
@@ -273,22 +162,6 @@ function isEs6Module(module) {
   return (typeof module === 'function' && isConstructor(module)) || (typeof module === 'object' && typeof module.default === 'function' && module.__esModule);
 }
 
-function urlJoin(url, path) {
-  let urlChunks = url.split('/');
-  let levels = path.split('../');
-
-  // remove relative path parts from end of url
-  urlChunks = urlChunks.slice(0, urlChunks.length - levels.length);
-
-  // remove relative path parts from start of match
-  levels.splice(0, levels.length - 1);
-
-  // add on our match so we can join later
-  urlChunks = urlChunks.concat(levels.join().replace('./', ''));
-
-  return urlChunks.join('/');
-}
-
 module.exports = {
   dirname,
   filesMatchingPattern,
@@ -301,14 +174,10 @@ module.exports = {
   isValidDomain,
   cd,
   sed,
-  downloadFile,
   extractTar,
   extractZip,
-  getExternalContractUrl,
   normalizeInput,
-  groupBy,
   interceptLogs,
   getWindowSize,
-  isEs6Module,
-  urlJoin
+  isEs6Module
 };
@@ -1,5 +1,5 @@
 /*global describe, it, require*/
-import { File, Types } from "../lib/core/file";
+import { File, Types } from "embark-utils";
 
 let ContractsManager = require('embark-contracts-manager');
 let Compiler = require('embark-compiler');
@@ -1,5 +1,5 @@
 /*globals describe, it, before, beforeEach*/
-const {File, Types} = require("../lib/core/file");
+import { File, Types } from 'embark-utils';
 const Assert = require("assert");
 const {expect} = require("chai");
 const fs = require("../lib/core/fs");
@@ -1,6 +1,5 @@
 /*globals describe, it*/
-const { dappPath } = require('embark-utils');
-const {File, Types} = require("../lib/core/file");
+const { dappPath, File, Types } = require('embark-utils');
 const {expect} = require("chai");
 const fs = require("../lib/core/fs");
 
@@ -1,5 +1,5 @@
 /*global describe, it, require*/
-import { File, Types } from "../../../lib/core/file";
+import { File, Types } from "embark-utils";
 
 const assert = require('assert');
 
@@ -1,8 +1,6 @@
 /*globals describe, it, before*/
-const { dappPath } = require('embark-utils');
-const {File, Types} = require("../../../lib/core/file");
+const { dappPath, File, Types, prepareForCompilation } = require('embark-utils');
 const path = require("path");
-const remapImports = require("../../../lib/utils/solidity/remapImports");
 const {expect} = require("chai");
 const fs = require("../../../lib/core/fs");
 const fsNode = require("fs");
@@ -13,7 +11,7 @@ describe('embark.RemapImports', function () {
   describe('Import remappings from local file', function () {
     before('do the remappings', async () => {
       file = new File({path: 'contracts/recursive_test_0.sol', type: Types.dappFile});
-      content = await remapImports.prepareForCompilation(file);
+      content = await prepareForCompilation(file);
     });
 
     it("should find and add remappings for all recursive imports", (done) => {
@@ -64,7 +62,7 @@ describe('embark.RemapImports', function () {
   describe('Import remappings from external URL', function () {
     before('do the external HTTP contract remappings', async () => {
       file = new File({externalUrl: 'https://github.com/embark-framework/embark/master/packages/embark/src/test/contracts/recursive_test_0.sol', type: Types.http});
-      content = await remapImports.prepareForCompilation(file);
+      content = await prepareForCompilation(file);
     });
 
     it("should find and add remappings for all recursive imports", (done) => {
@@ -1,7 +1,7 @@
 /*global describe, it, require*/
-import { File, Types } from "../../../lib/core/file.js";
 const fs = require('../../../lib/core/fs');
 import { IPC } from 'embark-core';
+import { File, Types } from 'embark-utils';
 let SolidityCompiler = require('embark-solidity');
 let TestLogger = require('../../../lib/utils/test_logger');
 
@@ -1,12 +1,13 @@
 /*global describe, it*/
 const Utils = require('../lib/utils/utils');
+import { getExternalContractUrl } from 'embark-utils';
 const assert = require('assert');
 const constants = require('embark-core/constants');
 
 describe('embark.utils', function () {
   describe('#getExternalContractUrl', function () {
     it('should get the right url for a https://github file', function () {
-      const fileObj = Utils.getExternalContractUrl(
+      const fileObj = getExternalContractUrl(
         'https://github.com/embark-framework/embark/blob/master/dapps/templates/demo/contracts/simple_storage.sol'
       );
       assert.deepEqual(fileObj,
@@ -17,14 +18,14 @@ describe('embark.utils', function () {
     });
 
     it('should fail for a malformed https://github file', function () {
-      const fileObj = Utils.getExternalContractUrl(
+      const fileObj = getExternalContractUrl(
        'https://github/embark-framework/embark/blob/master/dapps/templates/demo/contracts/simple_storage.sol'
      );
      assert.strictEqual(fileObj, null);
    });

    it('should get the right url for a git:// file with no branch #', function () {
-      const fileObj = Utils.getExternalContractUrl(
+      const fileObj = getExternalContractUrl(
        'git://github.com/status-im/contracts/contracts/identity/ERC725.sol'
      );
      assert.deepEqual(fileObj,
@@ -35,7 +36,7 @@ describe('embark.utils', function () {
    });

    it('should get the right url for a git:// file with a branch #', function () {
-      const fileObj = Utils.getExternalContractUrl(
+      const fileObj = getExternalContractUrl(
        'git://github.com/status-im/contracts/contracts/identity/ERC725.sol#myBranch'
      );
      assert.deepEqual(fileObj,
@@ -46,14 +47,14 @@ describe('embark.utils', function () {
    });

    it('should fail when the git:// file is malformed', function () {
-      const fileObj = Utils.getExternalContractUrl(
+      const fileObj = getExternalContractUrl(
        'git://github.com/identity/ERC725.sol#myBranch'
      );
      assert.strictEqual(fileObj, null);
    });

    it('should get the right url with a github.com file without branch #', function () {
-      const fileObj = Utils.getExternalContractUrl(
+      const fileObj = getExternalContractUrl(
        'github.com/status-im/contracts/contracts/identity/ERC725.sol'
      );
      assert.deepEqual(fileObj,
@@ -64,7 +65,7 @@ describe('embark.utils', function () {
    });

    it('should get the right url with a github.com file with branch #', function () {
-      const fileObj = Utils.getExternalContractUrl(
+      const fileObj = getExternalContractUrl(
        'github.com/status-im/contracts/contracts/identity/ERC725.sol#theBranch'
      );
      assert.deepEqual(fileObj,
@@ -75,14 +76,14 @@ describe('embark.utils', function () {
    });

    it('should fail with a malformed github.com url', function () {
-      const fileObj = Utils.getExternalContractUrl(
+      const fileObj = getExternalContractUrl(
        'github/status-im/contracts/contracts/identity/ERC725.sol#theBranch'
      );
      assert.strictEqual(fileObj, null);
    });

    it('should succeed with a generic http url', function () {
-      const fileObj = Utils.getExternalContractUrl(
+      const fileObj = getExternalContractUrl(
        'http://myurl.com/myFile.sol'
      );
      assert.deepEqual(fileObj, {
@@ -93,7 +94,7 @@ describe('embark.utils', function () {

    it('should get the correct default url for a correct bzz:/ swarm file', function () {
      const swarmFile = 'bzz:/someensdomain.eth/ERC725.sol';
-      const fileObj = Utils.getExternalContractUrl(
+      const fileObj = getExternalContractUrl(
        swarmFile
      );
      assert.deepEqual(fileObj, {
@@ -104,7 +105,7 @@ describe('embark.utils', function () {

    it('should get the correct url for a correct bzz:/ swarm file when a http swarm gateway is explicitly provided', function () {
      const swarmFile = 'bzz:/someensdomain.eth/ERC725.sol';
-      const fileObj = Utils.getExternalContractUrl(
+      const fileObj = getExternalContractUrl(
        swarmFile,
        'http://localhost:8500'
      );
@@ -116,7 +117,7 @@ describe('embark.utils', function () {

    it('should get the correct url for a correct bzz:/ swarm file when a https swarm gateway is provided', function () {
      const swarmFile = 'bzz:/1ffe993abc835f480f688d07ad75ad1dbdbd1ddb368a08b7ed4d3e400771dd63';
-      const fileObj = Utils.getExternalContractUrl(
+      const fileObj = getExternalContractUrl(
        swarmFile,
        'https://swarm-gateways.net'
      );
@@ -1,7 +1,7 @@
 {
   "compilerOptions": {
+    "allowJs": true,
     "esModuleInterop": true,
-    "isolatedModules": true,
     "lib": ["ES2017"],
     "module": "CommonJS",
     "noEmit": true,