First working version

parent f8e22177fe
commit 963aed8b8e
@@ -1,16 +0,0 @@
var colors = require("colors");
var TruffleError = require("truffle-error");
var inherits = require("util").inherits;

inherits(CompileError, TruffleError);

function CompileError(message) {
  // Note we trim() because solc likes to add extra whitespace.
  var fancy_message = message.trim() + "\n" + colors.red("Compilation failed. See above.");
  var normal_message = message.trim();

  CompileError.super_.call(this, normal_message);
  this.message = fancy_message;
}

module.exports = CompileError;
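For orientation, a one-line usage sketch of this error type as the compile step further below uses it (the message text is illustrative):

const CompileError = require("./compileerror");
// The message is typically solc's joined formattedMessage output.
throw new CompileError("ParsedContract.sol:3:1: ParserError: Expected pragma, import directive or contract definition.");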
@@ -1,96 +0,0 @@
const path = require("path");
const fs = require("fs");
const semver = require("semver");

const {
  Bundled,
  Docker,
  Local,
  Native,
  VersionRange
} = require("./loadingStrategies");

class CompilerSupplier {
  constructor(_config) {
    _config = _config || {};
    const defaultConfig = { version: null };
    this.config = Object.assign({}, defaultConfig, _config);
    this.strategyOptions = { version: this.config.version };
  }

  badInputError(userSpecification) {
    const message =
      `Could not find a compiler version matching ${userSpecification}. ` +
      `compilers.solc.version option must be a string specifying:\n` +
      `   - a path to a locally installed solcjs\n` +
      `   - a solc version or range (ex: '0.4.22' or '^0.5.0')\n` +
      `   - a docker image name (ex: 'stable')\n` +
      `   - 'native' to use natively installed solc\n`;
    return new Error(message);
  }

  load() {
    const userSpecification = this.config.version;

    return new Promise(async (resolve, reject) => {
      let strategy;
      const useDocker = this.config.docker;
      const useNative = userSpecification === "native";
      const useBundledSolc = !userSpecification;
      const useSpecifiedLocal =
        userSpecification && this.fileExists(userSpecification);
      const isValidVersionRange = semver.validRange(userSpecification);

      if (useDocker) {
        strategy = new Docker(this.strategyOptions);
      } else if (useNative) {
        strategy = new Native(this.strategyOptions);
      } else if (useBundledSolc) {
        strategy = new Bundled(this.strategyOptions);
      } else if (useSpecifiedLocal) {
        strategy = new Local(this.strategyOptions);
      } else if (isValidVersionRange) {
        strategy = new VersionRange(this.strategyOptions);
      }

      if (strategy) {
        try {
          const solc = await strategy.load(userSpecification);
          resolve(solc);
        } catch (error) {
          reject(error);
        }
      } else {
        reject(this.badInputError(userSpecification));
      }
    });
  }

  fileExists(localPath) {
    return fs.existsSync(localPath) || path.isAbsolute(localPath);
  }

  getDockerTags() {
    return new Docker(this.strategyOptions).getDockerTags();
  }

  getReleases() {
    return new VersionRange(this.strategyOptions)
      .getSolcVersions()
      .then(list => {
        const prereleases = list.builds
          .filter(build => build["prerelease"])
          .map(build => build["longVersion"]);

        const releases = Object.keys(list.releases);

        return {
          prereleases: prereleases,
          releases: releases,
          latestRelease: list.latestRelease
        };
      });
  }
}

module.exports = CompilerSupplier;
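A minimal usage sketch of the supplier above; the strategy chosen follows the load() branching (docker flag, "native", empty version, local path, or semver range). The version value here is illustrative:

const CompilerSupplier = require("./compilerSupplier");

const supplier = new CompilerSupplier({ version: "^0.5.0" });

supplier
  .load()
  .then(solc => {
    // A semver range resolves through the VersionRange strategy to a solc-js compiler.
    console.log("Loaded solc", solc.version());
  })
  .catch(err => console.error(err.message));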
@@ -1,14 +0,0 @@
const LoadingStrategy = require("./LoadingStrategy");

class Bundled extends LoadingStrategy {
  load() {
    return this.getBundledSolc();
  }

  getBundledSolc() {
    this.removeListener();
    return require("solc");
  }
}

module.exports = Bundled;
@@ -1,101 +0,0 @@
const request = require("request-promise");
const fs = require("fs");
const { execSync } = require("child_process");
const ora = require("ora");
const semver = require("semver");
const LoadingStrategy = require("./LoadingStrategy");
const VersionRange = require("./VersionRange");

class Docker extends LoadingStrategy {
  async load() {
    const versionString = await this.validateAndGetSolcVersion();
    const command =
      "docker run -i ethereum/solc:" + this.config.version + " --standard-json";

    const versionRange = new VersionRange();
    const commit = versionRange.getCommitFromVersion(versionString);

    return versionRange
      .getSolcByCommit(commit)
      .then(solcjs => {
        return {
          compile: options => String(execSync(command, { input: options })),
          version: () => versionString,
          importsParser: solcjs
        };
      })
      .catch(error => {
        if (error.message === "No matching version found") {
          throw this.errors("noVersion", versionString);
        }
        throw new Error(error);
      });
  }

  getDockerTags() {
    return request(this.config.dockerTagsUrl)
      .then(list => JSON.parse(list).results.map(item => item.name))
      .catch(error => {
        throw this.errors("noRequest", this.config.dockerTagsUrl, error);
      });
  }

  downloadDockerImage(image) {
    if (!semver.valid(image)) {
      const message =
        `The image version you have provided is not valid.\n` +
        `Please ensure that ${image} is a valid docker image name.`;
      throw new Error(message);
    }
    const spinner = ora({
      text: "Downloading Docker image",
      color: "red"
    }).start();
    try {
      execSync(`docker pull ethereum/solc:${image}`);
      spinner.stop();
    } catch (error) {
      spinner.stop();
      throw new Error(error);
    }
  }

  async validateAndGetSolcVersion() {
    const image = this.config.version;
    const fileName = image + ".version";

    // Skip validation if they've validated for this image before.
    if (this.fileIsCached(fileName)) {
      const cachePath = this.resolveCache(fileName);
      return fs.readFileSync(cachePath, "utf-8");
    }
    // Image specified
    if (!image) throw this.errors("noString", image);

    // Docker exists locally
    try {
      execSync("docker -v");
    } catch (error) {
      throw this.errors("noDocker");
    }

    // Image exists locally
    try {
      execSync("docker inspect --type=image ethereum/solc:" + image);
    } catch (error) {
      console.log(`${image} does not exist locally.\n`);
      console.log("Attempting to download the Docker image.");
      this.downloadDockerImage(image);
    }

    // Get version & cache.
    const version = execSync(
      "docker run ethereum/solc:" + image + " --version"
    );
    const normalized = new VersionRange().normalizeSolcVersion(version);
    this.addFileToCache(normalized, fileName);
    return normalized;
  }
}

module.exports = Docker;
@@ -1,94 +0,0 @@
const findCacheDir = require("find-cache-dir");
const fs = require("fs");

class LoadingStrategy {
  constructor(options) {
    const defaultConfig = {
      versionsUrl: "https://solc-bin.ethereum.org/bin/list.json",
      compilerUrlRoot: "https://solc-bin.ethereum.org/bin/",
      dockerTagsUrl:
        "https://registry.hub.docker.com/v2/repositories/ethereum/solc/tags/"
    };
    this.config = Object.assign({}, defaultConfig, options);
    this.cachePath = findCacheDir({
      name: "truffle",
      cwd: __dirname,
      create: true
    });
  }

  addFileToCache(code, fileName) {
    const filePath = this.resolveCache(fileName);
    fs.writeFileSync(filePath, code);
  }

  errors(kind, input, error) {
    const info = "Run `truffle compile --list` to see available versions.";

    const kinds = {
      noPath: "Could not find compiler at: " + input,
      noVersion:
        `Could not find a compiler version matching ${input}. ` +
        `Please ensure you are specifying a valid version, constraint or ` +
        `build in the truffle config. ${info}`,
      noRequest:
        "Failed to complete request to: " +
        input +
        ". Are you connected to the internet?\n\n" +
        error,
      noDocker:
        "You are trying to run dockerized solc, but docker is not installed.",
      noImage:
        "Please pull " +
        input +
        " from docker before trying to compile with it.",
      noNative: "Could not execute local solc binary: " + error,
      noString:
        "`compilers.solc.version` option must be a string specifying:\n" +
        "   - a path to a locally installed solcjs\n" +
        "   - a solc version or range (ex: '0.4.22' or '^0.5.0')\n" +
        "   - a docker image name (ex: 'stable')\n" +
        "   - 'native' to use natively installed solc\n" +
        "Received: " +
        input +
        " instead."
    };

    return new Error(kinds[kind]);
  }

  fileIsCached(fileName) {
    const file = this.resolveCache(fileName);
    return fs.existsSync(file);
  }

  load(_userSpecification) {
    throw new Error(
      "Abstract method LoadingStrategy.load is not implemented for this strategy."
    );
  }

  /**
   * Cleans up error listeners set (by solc?) when requiring it. (This code inherited from
   * previous implementation, note to self - ask Tim about this)
   */
  removeListener() {
    const listeners = process.listeners("uncaughtException");
    const exceptionHandler = listeners[listeners.length - 1];

    if (exceptionHandler) {
      process.removeListener("uncaughtException", exceptionHandler);
    }
  }

  resolveCache(fileName) {
    const thunk = findCacheDir({
      name: "truffle",
      cwd: __dirname,
      thunk: true
    });
    return thunk(fileName);
  }
}

module.exports = LoadingStrategy;
@@ -1,26 +0,0 @@
const path = require("path");
const originalRequire = require("original-require");
const LoadingStrategy = require("./LoadingStrategy");

class Local extends LoadingStrategy {
  load(localPath) {
    return this.getLocalCompiler(localPath);
  }

  getLocalCompiler(localPath) {
    let compiler, compilerPath;
    compilerPath = path.isAbsolute(localPath)
      ? localPath
      : path.resolve(process.cwd(), localPath);

    try {
      compiler = originalRequire(compilerPath);
      this.removeListener();
    } catch (error) {
      throw this.errors("noPath", localPath, error);
    }
    return compiler;
  }
}

module.exports = Local;
@@ -1,40 +0,0 @@
const { execSync } = require("child_process");
const LoadingStrategy = require("./LoadingStrategy");
const VersionRange = require("./VersionRange");

class Native extends LoadingStrategy {
  load() {
    const versionString = this.validateAndGetSolcVersion();
    const command = "solc --standard-json";

    const versionRange = new VersionRange();
    const commit = versionRange.getCommitFromVersion(versionString);
    return versionRange
      .getSolcByCommit(commit)
      .then(solcjs => {
        return {
          compile: options => String(execSync(command, { input: options })),
          version: () => versionString,
          importsParser: solcjs
        };
      })
      .catch(error => {
        if (error.message === "No matching version found") {
          throw this.errors("noVersion", versionString);
        }
        throw new Error(error);
      });
  }

  validateAndGetSolcVersion() {
    let version;
    try {
      version = execSync("solc --version");
    } catch (error) {
      throw this.errors("noNative", null, error);
    }
    return new VersionRange().normalizeSolcVersion(version);
  }
}

module.exports = Native;
@@ -1,209 +0,0 @@
const debug = require("debug")("compile:compilerSupplier");
const requireFromString = require("require-from-string");
const fs = require("fs");
const ora = require("ora");
const originalRequire = require("original-require");
const request = require("request-promise");
const semver = require("semver");
const solcWrap = require("solc/wrapper");
const LoadingStrategy = require("./LoadingStrategy");

class VersionRange extends LoadingStrategy {
  compilerFromString(code) {
    const soljson = requireFromString(code);
    const wrapped = solcWrap(soljson);
    this.removeListener();
    return wrapped;
  }

  findNewestValidVersion(version, allVersions) {
    if (!semver.validRange(version)) return null;
    const satisfyingVersions = Object.keys(allVersions.releases)
      .map(solcVersion => {
        if (semver.satisfies(solcVersion, version)) return solcVersion;
      })
      .filter(solcVersion => solcVersion);
    if (satisfyingVersions.length > 0) {
      return satisfyingVersions.reduce((newestVersion, version) => {
        return semver.gtr(version, newestVersion) ? version : newestVersion;
      }, "0.0.0");
    } else {
      return null;
    }
  }

  getCachedSolcByFileName(fileName) {
    const filePath = this.resolveCache(fileName);
    const soljson = originalRequire(filePath);
    debug("soljson %o", soljson);
    const wrapped = solcWrap(soljson);
    this.removeListener();
    return wrapped;
  }

  // Range can also be a single version specification like "0.5.0"
  getCachedSolcByVersionRange(version) {
    const cachedCompilerFileNames = fs.readdirSync(this.cachePath);
    const validVersions = cachedCompilerFileNames.filter(fileName => {
      const match = fileName.match(/v\d+\.\d+\.\d+.*/);
      if (match) return semver.satisfies(match[0], version);
    });

    const multipleValidVersions = validVersions.length > 1;
    const compilerFileName = multipleValidVersions
      ? this.getMostRecentVersionOfCompiler(validVersions)
      : validVersions[0];
    return this.getCachedSolcByFileName(compilerFileName);
  }

  getCachedSolcFileName(commit) {
    const cachedCompilerFileNames = fs.readdirSync(this.cachePath);
    return cachedCompilerFileNames.find(fileName => {
      return fileName.includes(commit);
    });
  }

  getCommitFromVersion(versionString) {
    return "commit." + versionString.match(/commit\.(.*?)\./)[1];
  }

  getMostRecentVersionOfCompiler(versions) {
    return versions.reduce((mostRecentVersionFileName, fileName) => {
      const match = fileName.match(/v\d+\.\d+\.\d+.*/);
      const mostRecentVersionMatch = mostRecentVersionFileName.match(
        /v\d+\.\d+\.\d+.*/
      );
      return semver.gtr(match[0], mostRecentVersionMatch[0])
        ? fileName
        : mostRecentVersionFileName;
    }, "-v0.0.0+commit");
  }

  getSatisfyingVersionFromCache(versionRange) {
    if (this.versionIsCached(versionRange)) {
      return this.getCachedSolcByVersionRange(versionRange);
    }
    throw this.errors("noVersion", versionRange);
  }

  async getSolcByCommit(commit) {
    const solcFileName = this.getCachedSolcFileName(commit);
    if (solcFileName) return this.getCachedSolcByFileName(solcFileName);

    const allVersions = await this.getSolcVersions(this.config.versionsUrl);
    const fileName = this.getSolcVersionFileName(commit, allVersions);

    if (!fileName) throw new Error("No matching version found");

    return this.getSolcByUrlAndCache(fileName);
  }

  async getSolcByUrlAndCache(fileName) {
    const url = this.config.compilerUrlRoot + fileName;
    const spinner = ora({
      text: "Downloading compiler",
      color: "red"
    }).start();
    try {
      const response = await request.get(url);
      spinner.stop();
      this.addFileToCache(response, fileName);
      return this.compilerFromString(response);
    } catch (error) {
      spinner.stop();
      throw this.errors("noRequest", url, error);
    }
  }

  async getSolcFromCacheOrUrl(version) {
    let allVersions;
    try {
      allVersions = await this.getSolcVersions(this.config.versionsUrl);
    } catch (error) {
      throw this.errors("noRequest", version, error);
    }

    const fileName = this.getSolcVersionFileName(version, allVersions);
    if (!fileName) throw this.errors("noVersion", version);

    if (this.fileIsCached(fileName))
      return this.getCachedSolcByFileName(fileName);

    return this.getSolcByUrlAndCache(fileName);
  }

  getSolcVersions() {
    const spinner = ora({
      text: "Fetching solc version list from solc-bin",
      color: "yellow"
    }).start();

    return request(this.config.versionsUrl)
      .then(list => {
        spinner.stop();
        return JSON.parse(list);
      })
      .catch(err => {
        spinner.stop();
        throw this.errors("noRequest", this.config.versionsUrl, err);
      });
  }

  getSolcVersionFileName(version, allVersions) {
    if (allVersions.releases[version]) return allVersions.releases[version];

    const isPrerelease =
      version.includes("nightly") || version.includes("commit");

    if (isPrerelease) {
      for (let build of allVersions.builds) {
        const exists =
          build["prerelease"] === version ||
          build["build"] === version ||
          build["longVersion"] === version;

        if (exists) return build["path"];
      }
    }

    const versionToUse = this.findNewestValidVersion(version, allVersions);

    if (versionToUse) return allVersions.releases[versionToUse];

    return null;
  }

  async load(versionRange) {
    const rangeIsSingleVersion = semver.valid(versionRange);
    if (rangeIsSingleVersion && this.versionIsCached(versionRange)) {
      return this.getCachedSolcByVersionRange(versionRange);
    }

    try {
      return await this.getSolcFromCacheOrUrl(versionRange);
    } catch (error) {
      if (error.message.includes("Failed to complete request")) {
        return this.getSatisfyingVersionFromCache(versionRange);
      }
      throw new Error(error);
    }
  }

  normalizeSolcVersion(input) {
    const version = String(input);
    return version.split(":")[1].trim();
  }

  versionIsCached(version) {
    const cachedCompilerFileNames = fs.readdirSync(this.cachePath);
    const cachedVersions = cachedCompilerFileNames.map(fileName => {
      const match = fileName.match(/v\d+\.\d+\.\d+.*/);
      if (match) return match[0];
    });
    return cachedVersions.find(cachedVersion =>
      semver.satisfies(cachedVersion, version)
    );
  }
}

module.exports = VersionRange;
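A hedged sketch of driving the range strategy above directly (it relies on the solc-bin URLs from LoadingStrategy's defaults; the range is illustrative). Cached soljson files satisfying the range are preferred; otherwise list.json is fetched and the newest satisfying release is downloaded and cached:

const { VersionRange } = require("./loadingStrategies");

new VersionRange({ version: "^0.5.0" })
  .load("^0.5.0")
  .then(solc => console.log(solc.version()))
  .catch(err => console.error(err.message));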
@@ -1,8 +0,0 @@
module.exports = {
  Bundled: require("./Bundled"),
  Docker: require("./Docker"),
  LoadingStrategy: require("./LoadingStrategy"),
  Local: require("./Local"),
  Native: require("./Native"),
  VersionRange: require("./VersionRange")
};
@@ -1,416 +0,0 @@
/*
  This code is modified from truffle-compile.

  However, on solcjs.compile we allow an import callback function to get source code text,
  which is important so we populate AST information more fully.

  The other change is that we gather data on a per file basis rather than on a per contract basis.

  Note: the use of var vs let/const is a holdover from truffle-compile.
*/

const assert = require('assert');
const fs = require('fs');
const OS = require("os");
const path = require("path");
const Profiler = require("./profiler");
const CompileError = require("./compileerror");
const CompilerSupplier = require("./compilerSupplier");
const expect = require("truffle-expect");
const find_contracts = require("truffle-contract-sources");
const Config = require("truffle-config");

const getSourceFileName = sourcePath => {
  let shortName = path.basename(sourcePath);
  if (shortName.endsWith('.sol')) {
    shortName = shortName.slice(0, -4);
  }
  return shortName;
};

function sourcePath2BuildPath(sourcePath, buildDir) {
  const shortName = getSourceFileName(sourcePath);
  return path.join(buildDir, shortName + '.json');
}

/* Returns true if the build artifact is missing or older than its source file. */
function staleBuildContract(sourcePath, buildPath) {
  let sourcePathStat, buildPathStat;
  try {
    sourcePathStat = fs.statSync(sourcePath);
  } catch (err) {
    return true;
  }
  try {
    buildPathStat = fs.statSync(buildPath);
  } catch (err) {
    return true;
  }

  const sourceMtime = sourcePathStat.mtime;
  const buildMtime = buildPathStat.mtime;
  return sourceMtime > buildMtime;
}

// Recent versions of truffle seem to add __ to the end of the bytecode
const cleanBytecode = bytecode => {
  let cleanedBytecode = bytecode.replace(/_.+$/, '');
  cleanedBytecode = `0x${cleanedBytecode}`;
  return cleanedBytecode;
};

const normalizeJsonOutput = (jsonObject, allSources, options) => {
  const { contracts, sources, compiler, updatedAt } = jsonObject;
  const result = {
    compiler,
    updatedAt,
    sources: {},
  };

  for (const [ sourcePath, solData ] of Object.entries(contracts)) {
    if (!result.sources[sourcePath]) {
      result.sources[sourcePath] = {
        // sourcePath,
        contracts: [],
      };
    }
    for (const [ contractName, contractData ] of Object.entries(solData)) {
      const o = {
        contractName,
        bytecode: cleanBytecode(contractData.evm.bytecode.object),
        deployedBytecode: cleanBytecode(contractData.evm.deployedBytecode.object),
        sourceMap: contractData.evm.bytecode.sourceMap,
        deployedSourceMap: contractData.evm.deployedBytecode.sourceMap,
      };

      result.sources[sourcePath].contracts.push(o);
    }
  }

  for (const [ sourcePath, solData ] of Object.entries(sources)) {
    if (!result.sources[sourcePath]) {
      continue;
    }
    result.sources[sourcePath].ast = solData.ast;
    result.sources[sourcePath].legacyAST = solData.legacyAST;
    result.sources[sourcePath].id = solData.id;

    result.sources[sourcePath].source = allSources[sourcePath];
  }

  return result;
};

// Most basic of the compile commands. Takes a sources object, where
// the keys are file or module paths and the values are the bodies of
// the contracts. Does not evaluate dependencies that aren't already given.
//
// Default options:
// {
//   strict: false,
//   quiet: false,
//   logger: console
// }
var compile = function(sourcePath, allSources, options, callback, isStale) {

  if (typeof options === "function") {
    callback = options;
    options = {};
  }

  if (options.logger === undefined) options.logger = console;

  var hasTargets =
    options.compilationTargets && options.compilationTargets.length;

  expect.options(options, ["contracts_directory", "compilers"]);

  expect.options(options.compilers, ["solc"]);

  options.compilers.solc.settings.evmVersion =
    options.compilers.solc.settings.evmVersion ||
    options.compilers.solc.evmVersion ||
    {};
  options.compilers.solc.settings.optimizer =
    options.compilers.solc.settings.optimizer ||
    options.compilers.solc.optimizer ||
    {};

  // Ensure sources have operating system independent paths
  // i.e., convert backslashes to forward slashes; things like C: are left intact.
  var operatingSystemIndependentSources = {};
  var operatingSystemIndependentTargets = {};
  var originalPathMappings = {};

  var defaultSelectors = {
    "": ["legacyAST", "ast"],
    "*": [
      "abi",
      "evm.bytecode.object",
      "evm.bytecode.sourceMap",
      "evm.deployedBytecode.object",
      "evm.deployedBytecode.sourceMap",
      "userdoc",
      "devdoc"
    ]
  };

  // Specify compilation targets
  // Each target uses defaultSelectors, defaulting to single target `*` if targets are unspecified
  var outputSelection = {};
  var targets = operatingSystemIndependentTargets;
  var targetPaths = Object.keys(targets);

  targetPaths.length
    ? targetPaths.forEach(key => (outputSelection[key] = defaultSelectors))
    : (outputSelection["*"] = defaultSelectors);

  var solcStandardInput = {
    language: "Solidity",
    sources: {},
    settings: {
      evmVersion: options.compilers.solc.settings.evmVersion,
      optimizer: options.compilers.solc.settings.optimizer,
      outputSelection
    }
  };

  // Load solc module only when compilation is actually required.
  const supplier = new CompilerSupplier(options.compilers.solc);

  supplier
    .load()
    .then(solc => {

      const solcVersion = solc.version();

      solcStandardInput.sources = {};
      Object.keys(allSources).forEach(p => {
        solcStandardInput.sources[p] = {
          content: allSources[p],
        };
      });

      const result = solc.compile(JSON.stringify(solcStandardInput));

      var standardOutput = JSON.parse(result);

      var errors = standardOutput.errors || [];
      var warnings = [];

      if (options.strict !== true) {
        warnings = errors.filter(function(error) {
          return error.severity === "warning";
        });

        errors = errors.filter(function(error) {
          return error.severity !== "warning";
        });

        if (options.quiet !== true && warnings.length > 0) {
          options.logger.log(
            OS.EOL + "Compilation warnings encountered:" + OS.EOL
          );
          options.logger.log(
            warnings
              .map(function(warning) {
                return warning.formattedMessage;
              })
              .join()
          );
        }
      }

      if (errors.length > 0) {
        options.logger.log("");
        return callback(
          new CompileError(
            standardOutput.errors
              .map(function(error) {
                return error.formattedMessage;
              })
              .join()
          )
        );
      }

      standardOutput.compiler = {
        name: "solc",
        version: solcVersion
      };
      standardOutput.source = allSources[sourcePath];
      standardOutput.updatedAt = new Date();

      const normalizedOutput = normalizeJsonOutput(standardOutput, allSources, options);

      // FIXME: the below return path is hokey, because it is in the format that
      // the multiPromisify'd caller in workflow-compile expects.
      const shortName = getSourceFileName(sourcePath);

      callback(null, {[shortName]: normalizedOutput}, isStale);
    })
    .catch(e => {
      throw e;
    });
};

/**
 * Compiles all source files whether they need it or not
 *
 * @param {Config} options - truffle config option
 * @param {function} callback - called on every source file found
 *
 * options.contracts_directory is a directory path where .sol files can be found.
 */
compile.all = function(options, callback) {
  find_contracts(options.contracts_directory, function(err, files) {
    if (err) return callback(err);

    options.paths = files;
    compile.with_dependencies(options, callback, true);
  });
};

/**
 * Compiles only source files that need updating. We use a
 * Make-style dependency check of timestamp + missing file.
 *
 * @param {Config} options - truffle config option
 * @param {function} callback - called on every source file found
 *
 */
compile.necessary = function(options, callback) {
  options.logger = options.logger || console;

  Profiler.updated(options, function(err, updated) {
    if (err) return callback(err);

    if (updated.length === 0 && options.quiet !== true) {
      return callback(null, [], {});
    }

    options.paths = updated;
    compile.with_dependencies(options, callback, false);
  });
};

/**
 * Compiles a source file and all of the files that it
 * depends on.
 *
 * @param {Config} options - truffle config option
 * @param {function} callback - called on every source file found
 * @param {boolean} compileAll - if true, compile whether or not
 *                  the file was deemed out of date.
 *
 */
compile.with_dependencies = async function(options, callback, compileAll) {
  var self = this;

  options.logger = options.logger || console;
  options.contracts_directory = options.contracts_directory || process.cwd();

  expect.options(options, [
    "paths",
    "working_directory",
    "contracts_directory",
    "resolver",
  ]);

  var config = Config.default().merge(options);

  // Filter out of the list of files to be compiled those for which we have a JSON that
  // is newer than the last modified time of the source file.

  const staleSolFiles = [];
  const filteredRequired = [];
  for (const sourcePath of options.paths) {
    const targetJsonPath = sourcePath2BuildPath(sourcePath, options.build_mythx_contracts);
    if (compileAll || staleBuildContract(sourcePath, targetJsonPath)) {
      // Set for compilation
      filteredRequired.push(sourcePath);
    } else {
      staleSolFiles.push(sourcePath);
    }
  }

  let isSolcLoaded = false;
  for (const sourcePath of filteredRequired) {
    if (!sourcePath.endsWith('/Migrations.sol')) {

      // If solc is not loaded yet, load it once so it gets cached.
      if (!isSolcLoaded) {
        const supplier = new CompilerSupplier(options.compilers.solc);
        await supplier
          .load()
          .then(solc => {
            // do nothing
          })
          .catch(e => {
            throw e;
          });
        isSolcLoaded = true;
      }

      Profiler.imported_sources(
        config.with({
          paths: [sourcePath],
          base_path: options.contracts_directory,
          resolver: options.resolver,
        }),
        (err, allSources, required) => {
          if (err) return callback(err);
          self.display(sourcePath, Object.keys(allSources), options);
          compile(sourcePath, allSources, options, callback, true);
        });
    }
  }

  staleSolFiles.forEach(sourcePath => {
    const targetJsonPath = sourcePath2BuildPath(sourcePath, options.build_mythx_contracts);
    // Pick up from existing JSON
    const buildJson = fs.readFileSync(targetJsonPath, 'utf8');
    const buildObj = JSON.parse(buildJson);
    const shortName = getSourceFileName(sourcePath);
    callback(null, {[shortName]: buildObj}, false);
  });
};

/**
 * Show what file is being compiled.
 */
compile.display = function(targetPath, paths, options) {
  if (options.quiet !== true) {
    if (path.isAbsolute(targetPath)) {
      const absTargetPath =
        "." + path.sep + path.relative(options.working_directory, targetPath);
      options.logger.log("Compiling " + absTargetPath + "...");
    } else {
      options.logger.log("Compiling " + targetPath + "...");
    }

    if (paths.length > 1) {
      options.logger.log("    with dependencies:");
    } else {
      return;
    }

    const blacklistRegex = /^truffle\/|\/Migrations.sol$/;

    paths.sort().forEach(fileName => {
      if (fileName === targetPath) return;
      if (path.isAbsolute(fileName)) {
        fileName =
          "." + path.sep + path.relative(options.working_directory, fileName);
      }
      if (fileName.match(blacklistRegex)) return;
      options.logger.log("    - " + fileName);
    });
  }
};

compile.CompilerSupplier = CompilerSupplier;
module.exports = compile;
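A hedged usage sketch of the exported compile object above. The option names mirror what with_dependencies() checks via truffle-expect plus build_mythx_contracts; building the resolver with truffle-resolver and the "./index" module path are assumptions, and all paths are illustrative:

const compile = require("./index");              // path to the module above (assumed)
const Config = require("truffle-config");
const Resolver = require("truffle-resolver");    // assumed resolver implementation

const options = Config.default().merge({
  working_directory: process.cwd(),
  contracts_directory: "./contracts",
  build_mythx_contracts: "./build/mythx/contracts",
  compilers: { solc: { settings: {} } }
});
options.resolver = new Resolver(options);

// The callback fires once per compiled source file with { shortName: normalizedOutput }.
compile.all(options, (err, result, isStale) => {
  if (err) return console.error(err);
  console.log(Object.keys(result), "stale:", isStale);
});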
@@ -1,91 +0,0 @@
const debug = require("debug")("compile:parser"); // eslint-disable-line no-unused-vars
var CompileError = require("./compileerror");

// Warning issued by a pre-release compiler version, ignored by this component.
var preReleaseCompilerWarning =
  "This is a pre-release compiler version, please do not use it in production.";

module.exports = {
  // This needs to be fast! It is fast (as of this writing). Keep it fast!
  parseImports: function(body, solc) {
    // WARNING: Kind of a hack (an expedient one).

    // So we don't have to maintain a separate parser, we'll get all the imports
    // in a file by sending the file to solc and evaluating the error messages
    // to see what import statements couldn't be resolved. To prevent full-on
    // compilation when a file has no import statements, we inject an import
    // statement right on the end; just to ensure it will error and we can parse
    // the imports speedily without doing extra work.

    // If we're using docker/native, we'll still want to use solcjs to do this part.
    if (solc.importsParser) solc = solc.importsParser;

    // Helper to detect import errors with an easy regex.
    var importErrorKey = "TRUFFLE_IMPORT";

    // Inject failing import.
    var failingImportFileName = "__Truffle__NotFound.sol";

    body = body + "\n\nimport '" + failingImportFileName + "';\n";

    var solcStandardInput = {
      language: "Solidity",
      sources: {
        "ParsedContract.sol": {
          content: body
        }
      },
      settings: {
        outputSelection: {
          "ParsedContract.sol": {
            "*": [] // We don't need any output.
          }
        }
      }
    };

    var output = solc.compile(JSON.stringify(solcStandardInput), function() {
      // The existence of this function ensures we get a parsable error message.
      // Without this, we'll get an error message we *can* detect, but the key will make it easier.
      // Note: This is not a normal callback. See docs here: https://github.com/ethereum/solc-js#from-version-021
      return { error: importErrorKey };
    });

    output = JSON.parse(output);

    // Filter out the "pre-release compiler" warning, if present.
    var errors = output.errors.filter(function(solidity_error) {
      return solidity_error.message.indexOf(preReleaseCompilerWarning) < 0;
    });

    var nonImportErrors = errors.filter(function(solidity_error) {
      // If the import error key is not found, we must not have an import error.
      // This means we have a *different* parsing error which we should show to the user.
      // Note: solc can return multiple parsing errors at once.
      // We ignore the "pre-release compiler" warning message.
      return solidity_error.formattedMessage.indexOf(importErrorKey) < 0;
    });

    // Should we try to throw more than one? (aside; we didn't before)
    if (nonImportErrors.length > 0) {
      throw new CompileError(nonImportErrors[0].formattedMessage);
    }

    // Now, all errors must be import errors.
    // Filter out our forced import, then get the import paths of the rest.
    var imports = errors
      .filter(function(solidity_error) {
        return solidity_error.message.indexOf(failingImportFileName) < 0;
      })
      .map(function(solidity_error) {
        var matches = solidity_error.formattedMessage.match(
          /import[^'"]+("|')([^'"]+)("|')/
        );

        // Return the item between the quotes.
        return matches[2];
      });

    return imports;
  }
};
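A small sketch of how parseImports above is typically driven: the solc instance comes from CompilerSupplier.load(), and the Solidity snippet is illustrative:

const Parser = require("./parser");
const CompilerSupplier = require("./compilerSupplier");

const source = "pragma solidity ^0.5.0;\nimport \"./Dependency.sol\";\ncontract Example {}\n";

new CompilerSupplier({ version: "0.5.0" })
  .load()
  .then(solc => {
    // Resolves to something like ["./Dependency.sol"], extracted from solc's error output.
    console.log(Parser.parseImports(source, solc));
  });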
@@ -1,525 +0,0 @@
// Compares .sol files to their .sol.js counterparts,
// determines which .sol files have been updated.

var path = require("path");
var async = require("async");
var fs = require("fs");
var Parser = require("./parser");
var CompilerSupplier = require("./compilerSupplier");
var expect = require("truffle-expect");
var find_contracts = require("truffle-contract-sources");
var debug = require("debug")("compile:profiler"); // eslint-disable-line no-unused-vars

module.exports = {
  updated: function(options, callback) {
    expect.options(options, ["resolver"]);

    var contracts_directory = options.contracts_directory;

    var build_directory = options.build_mythx_contracts;

    function getFiles(done) {
      if (options.files) {
        done(null, options.files);
      } else {
        find_contracts(contracts_directory, done);
      }
    }

    var sourceFilesArtifacts = {};
    var sourceFilesArtifactsUpdatedTimes = {};

    var updatedFiles = [];

    async.series(
      [
        // Get all the source files and create an object out of them.
        function(c) {
          getFiles(function(err, files) {
            if (err) return c(err);

            // Use an object for O(1) access.
            files.forEach(function(sourceFile) {
              sourceFilesArtifacts[sourceFile] = [];
            });

            c();
          });
        },
        // Get all the artifact files, and read them, parsing them as JSON
        function(c) {
          fs.readdir(build_directory, function(err, build_files) {
            if (err) {
              // The build directory may not always exist.
              if (
                err.message.indexOf("ENOENT: no such file or directory") >= 0
              ) {
                // Ignore it.
                build_files = [];
              } else {
                return c(err);
              }
            }

            build_files = build_files.filter(function(build_file) {
              return path.extname(build_file) === ".json";
            });

            async.map(
              build_files,
              function(buildFile, finished) {
                fs.readFile(
                  path.join(build_directory, buildFile),
                  "utf8",
                  function(err, body) {
                    if (err) return finished(err);
                    finished(null, body);
                  }
                );
              },
              function(err, jsonData) {
                if (err) return c(err);

                try {
                  for (var i = 0; i < jsonData.length; i++) {
                    var data = JSON.parse(jsonData[i]);

                    // In case there are artifacts from other source locations.
                    if (sourceFilesArtifacts[data.sourcePath] == null) {
                      sourceFilesArtifacts[data.sourcePath] = [];
                    }

                    sourceFilesArtifacts[data.sourcePath].push(data);
                  }
                } catch (e) {
                  return c(e);
                }

                c();
              }
            );
          });
        },
        function(c) {
          // Get the minimum updated time for all of a source file's artifacts
          // (note: one source file might have multiple artifacts).
          Object.keys(sourceFilesArtifacts).forEach(function(sourceFile) {
            var artifacts = sourceFilesArtifacts[sourceFile];

            sourceFilesArtifactsUpdatedTimes[sourceFile] = artifacts.reduce(
              function(minimum, current) {
                var updatedAt = new Date(current.updatedAt).getTime();

                if (updatedAt < minimum) {
                  return updatedAt;
                }
                return minimum;
              },
              Number.MAX_SAFE_INTEGER
            );

            // Empty array?
            if (
              sourceFilesArtifactsUpdatedTimes[sourceFile] ===
              Number.MAX_SAFE_INTEGER
            ) {
              sourceFilesArtifactsUpdatedTimes[sourceFile] = 0;
            }
          });

          c();
        },
        // Stat all the source files, getting their updated times, and comparing them to
        // the artifact updated times.
        function(c) {
          var sourceFiles = Object.keys(sourceFilesArtifacts);

          async.map(
            sourceFiles,
            function(sourceFile, finished) {
              fs.stat(sourceFile, function(err, stat) {
                if (err) {
                  // Ignore it. This means the source file was removed
                  // but the artifact file possibly exists. Return null
                  // to signify that we should ignore it.
                  stat = null;
                }
                finished(null, stat);
              });
            },
            function(err, sourceFileStats) {
              if (err) return callback(err);

              sourceFiles.forEach(function(sourceFile, index) {
                var sourceFileStat = sourceFileStats[index];

                // Ignore updating artifacts if source file has been removed.
                if (sourceFileStat == null) {
                  return;
                }

                var artifactsUpdatedTime =
                  sourceFilesArtifactsUpdatedTimes[sourceFile] || 0;
                var sourceFileUpdatedTime = (
                  sourceFileStat.mtime || sourceFileStat.ctime
                ).getTime();

                if (sourceFileUpdatedTime > artifactsUpdatedTime) {
                  updatedFiles.push(sourceFile);
                }
              });

              c();
            }
          );
        }
      ],
      function(err) {
        callback(err, updatedFiles);
      }
    );
  },

  // Returns the minimal set of sources to pass to solc as compilation targets,
  // as well as the complete set of sources so solc can resolve the comp targets' imports.
  required_sources: function(options, callback) {
    var self = this;

    expect.options(options, ["paths", "base_path", "resolver"]);

    var resolver = options.resolver;

    // Fetch the whole contract set
    find_contracts(options.contracts_directory, (err, allPaths) => {
      if (err) return callback(err);

      // Solidity test files might have been injected. Include them in the known set.
      options.paths.forEach(_path => {
        if (!allPaths.includes(_path)) {
          allPaths.push(_path);
        }
      });

      var updates = self
        .convert_to_absolute_paths(options.paths, options.base_path)
        .sort();
      allPaths = self
        .convert_to_absolute_paths(allPaths, options.base_path)
        .sort();

      var allSources = {};
      var compilationTargets = [];

      // Load compiler
      var supplier = new CompilerSupplier(options.compilers.solc);
      supplier
        .load()
        .then(solc => {
          // Get all the source code
          self.resolveAllSources(resolver, allPaths, solc, (err, resolved) => {
            if (err) return callback(err);

            // Generate hash of all sources including external packages - passed to solc inputs.
            var resolvedPaths = Object.keys(resolved);
            resolvedPaths.forEach(
              file => (allSources[file] = resolved[file].body)
            );

            // Exit w/out minimizing if we've been asked to compile everything, or nothing.
            if (self.listsEqual(options.paths, allPaths)) {
              return callback(null, allSources, {});
            } else if (!options.paths.length) {
              return callback(null, {}, {});
            }

            // Seed compilationTargets with known updates
            updates.forEach(update => compilationTargets.push(update));

            // While there are updated files in the queue, we take each one
            // and search the entire file corpus to find any sources that import it.
            // Those sources are added to list of compilation targets as well as
            // the update queue because their own ancestors need to be discovered.
            async.whilst(
              () => updates.length > 0,
              updateFinished => {
                var currentUpdate = updates.shift();
                var files = allPaths.slice();

                // While files: dequeue and inspect their imports
                async.whilst(
                  () => files.length > 0,
                  fileFinished => {
                    var currentFile = files.shift();

                    // Ignore targets already selected.
                    if (compilationTargets.includes(currentFile)) {
                      return fileFinished();
                    }

                    var imports;
                    try {
                      imports = self.getImports(
                        currentFile,
                        resolved[currentFile],
                        solc
                      );
                    } catch (err) {
                      err.message =
                        "Error parsing " + currentFile + ": " + err.message;
                      return fileFinished(err);
                    }

                    // If file imports a compilation target, add it
                    // to list of updates and compilation targets
                    if (imports.includes(currentUpdate)) {
                      updates.push(currentFile);
                      compilationTargets.push(currentFile);
                    }

                    fileFinished();
                  },
                  err => updateFinished(err)
                );
              },
              err =>
                err
                  ? callback(err)
                  : callback(null, allSources, compilationTargets)
            );
          });
        })
        .catch(callback);
    });
  },

  // Return required (imported) dependency sources.
  // TODO: This is almost a copy of required_sources (added as a hot fix), so this should be optimized.
  imported_sources: function(options, callback) {
    var self = this;

    expect.options(options, ["paths", "base_path", "resolver"]);

    var resolver = options.resolver;

    // Fetch the whole contract set
    find_contracts(options.contracts_directory, (err, allPaths) => {
      if (err) return callback(err);

      // Solidity test files might have been injected. Include them in the known set.
      /*
      options.paths.forEach(_path => {
        if (!allPaths.includes(_path)) {
          allPaths.push(_path);
        }
      });
      */
      // Force replacement with options.paths
      allPaths = options.paths;

      var updates = self
        .convert_to_absolute_paths(options.paths, options.base_path)
        .sort();
      allPaths = self
        .convert_to_absolute_paths(allPaths, options.base_path)
        .sort();

      var allSources = {};
      var compilationTargets = [];

      // Load compiler
      var supplier = new CompilerSupplier(options.compilers.solc);
      supplier
        .load()
        .then(solc => {
          // Get all the source code
          self.resolveAllSources(resolver, allPaths, solc, (err, resolved) => {
            if (err) return callback(err);

            // Generate hash of all sources including external packages - passed to solc inputs.
            var resolvedPaths = Object.keys(resolved);
            resolvedPaths.forEach(
              file => (allSources[file] = resolved[file].body)
            );

            // Exit w/out minimizing if we've been asked to compile everything, or nothing.
            if (self.listsEqual(options.paths, allPaths)) {
              return callback(null, allSources, {});
            } else if (!options.paths.length) {
              return callback(null, {}, {});
            }

            // Seed compilationTargets with known updates
            updates.forEach(update => compilationTargets.push(update));

            // While there are updated files in the queue, we take each one
            // and search the entire file corpus to find any sources that import it.
            // Those sources are added to list of compilation targets as well as
            // the update queue because their own ancestors need to be discovered.
            async.whilst(
              () => updates.length > 0,
              updateFinished => {
                var currentUpdate = updates.shift();
                var files = allPaths.slice();

                // While files: dequeue and inspect their imports
                async.whilst(
                  () => files.length > 0,
                  fileFinished => {
                    var currentFile = files.shift();

                    // Ignore targets already selected.
                    if (compilationTargets.includes(currentFile)) {
                      return fileFinished();
                    }

                    var imports;
                    try {
                      imports = self.getImports(
                        currentFile,
                        resolved[currentFile],
                        solc
                      );
                    } catch (err) {
                      err.message =
                        "Error parsing " + currentFile + ": " + err.message;
                      return fileFinished(err);
                    }

                    // If file imports a compilation target, add it
                    // to list of updates and compilation targets
                    if (imports.includes(currentUpdate)) {
                      updates.push(currentFile);
                      compilationTargets.push(currentFile);
                    }

                    fileFinished();
                  },
                  err => updateFinished(err)
                );
              },
              err =>
                err
                  ? callback(err)
                  : callback(null, allSources, compilationTargets)
            );
          });
        })
        .catch(callback);
    });
  },

  // Resolves sources in several async passes. For each resolved set it detects unknown
  // imports from external packages and adds them to the set of files to resolve.
  resolveAllSources: function(resolver, initialPaths, solc, callback) {
    var self = this;
    var mapping = {};
    var allPaths = initialPaths.slice();

    function generateMapping(finished) {
      var promises = [];

      // Dequeue all the known paths, generating resolver promises.
      // We'll add paths if we discover external package imports.
      while (allPaths.length) {
        var file;
        var parent = null;

        var candidate = allPaths.shift();

        // Some paths will have been extracted as imports from a file
        // and have information about their parent location we need to track.
        if (typeof candidate === "object") {
          file = candidate.file;
          parent = candidate.parent;
        } else {
          file = candidate;
        }
        var promise = new Promise((accept, reject) => {
          resolver.resolve(file, parent, (err, body, absolutePath, source) => {
            err
              ? reject(err)
              : accept({ file: absolutePath, body: body, source: source });
          });
        });
        promises.push(promise);
      }

      // Resolve everything known and add it to the map, then inspect each file's
      // imports and add those to the list of paths to resolve if we don't have it.
      Promise.all(promises)
        .then(results => {
          // Generate the sources mapping
          results.forEach(
            item => (mapping[item.file] = Object.assign({}, item))
          );

          // Queue unknown imports for the next resolver cycle
          while (results.length) {
            var result = results.shift();

            // Inspect the imports
            var imports;
            try {
              imports = self.getImports(result.file, result, solc);
            } catch (err) {
              err.message = "Error parsing " + result.file + ": " + err.message;
              return finished(err);
            }

            // Detect unknown external packages / add them to the list of files to resolve
            // Keep track of location of this import because we need to report that.
            imports.forEach(item => {
              if (!mapping[item])
                allPaths.push({ file: item, parent: result.file });
            });
          }
        })
        .catch(finished)
        .then(finished);
    }

    async.whilst(
      () => allPaths.length,
      generateMapping,
      err => (err ? callback(err) : callback(null, mapping))
    );
  },

  getImports: function(file, resolved, solc) {
    var self = this;

    var imports = Parser.parseImports(resolved.body, solc);

    // Convert explicitly relative dependencies of modules back into module paths.
    return imports.map(dependencyPath => {
      return self.isExplicitlyRelative(dependencyPath)
        ? resolved.source.resolve_dependency_path(file, dependencyPath)
        : dependencyPath;
    });
  },

  listsEqual: function(listA, listB) {
    var a = listA.sort();
    var b = listB.sort();

    return JSON.stringify(a) === JSON.stringify(b);
  },

  convert_to_absolute_paths: function(paths, base) {
    var self = this;
    return paths.map(function(p) {
      // If it's an absolute path, leave it alone.
      if (path.isAbsolute(p)) return p;

      // If it's not explicitly relative, then leave it alone (i.e., it's a module).
      if (!self.isExplicitlyRelative(p)) return p;

      // Path must be explicitly relative, therefore make it absolute.
      return path.resolve(path.join(base, p));
    });
  },

  isExplicitlyRelative: function(import_path) {
    return import_path.indexOf(".") === 0;
  }
};
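A hedged sketch of calling Profiler.updated() above; the fields mirror what it reads (resolver, contracts_directory, build_mythx_contracts), truffle-resolver is an assumed way to build the resolver, and the paths are illustrative:

const Profiler = require("./profiler");
const Config = require("truffle-config");
const Resolver = require("truffle-resolver");    // assumption

const config = Config.default().merge({
  contracts_directory: "./contracts",
  build_mythx_contracts: "./build/mythx/contracts"
});
config.resolver = new Resolver(config);

Profiler.updated(config, (err, updatedFiles) => {
  if (err) return console.error(err);
  console.log("Sources newer than their artifacts:", updatedFiles);
});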
index.js | 10
@@ -6,7 +6,7 @@ module.exports = function(embark) {

  // Register for compilation results
  embark.events.on("contracts:compiled:solc", (res) => {
    console.log("contracts:compiled:solc", JSON.stringify(res));
    //console.log("contracts:compiled:solc", JSON.stringify(res));
    contracts = res;
  });


@@ -28,10 +28,10 @@ module.exports = function(embark) {
    embark.logger.info('cmd', cmd)
    embark.logger.info('cfg', JSON.stringify(cfg))
    try {
      embark.logger.info("verify process")
      embark.logger.info("Running MythX analysis in background.")
      const result = await mythx(contracts, cfg, embark)
      embark.logger.info("result", result)
      /*
      //embark.logger.info("result", result)

      if (returnCode === 0) {
        return callback(null, "returnCode: " + returnCode)
      } else if (returnCode === 1) {

@@ -41,7 +41,7 @@
        //TODO: Figure out how to use error with callback properly.
        return callback(new Error("Unexpected Error: return value of `analyze` should be either 0 or 1."), null)
      }
      */

    } catch (e) {
      embark.logger.error("error", e)
      return callback(e, "ERR: " + e.message)
@@ -1,40 +0,0 @@
const isFatal = (fatal, severity) => fatal || severity === 2;

const getUniqueMessages = messages => {
  const jsonValues = messages.map(m => JSON.stringify(m));
  const uniqueValues = jsonValues.reduce((accum, curr) => {
    if (accum.indexOf(curr) === -1) {
      accum.push(curr);
    }
    return accum;
  }, []);

  return uniqueValues.map(v => JSON.parse(v));
};

const calculateErrors = messages =>
  messages.reduce((acc, { fatal, severity }) => isFatal(fatal, severity) ? acc + 1 : acc, 0);

const calculateWarnings = messages =>
  messages.reduce((acc, { fatal, severity }) => !isFatal(fatal, severity) ? acc + 1 : acc, 0);


const getUniqueIssues = issues =>
  issues.map(({ messages, ...restProps }) => {
    const uniqueMessages = getUniqueMessages(messages);
    const warningCount = calculateWarnings(uniqueMessages);
    const errorCount = calculateErrors(uniqueMessages);

    return {
      ...restProps,
      messages: uniqueMessages,
      errorCount,
      warningCount,
    };
  });

module.exports = {
  getUniqueIssues,
  getUniqueMessages,
  isFatal,
};
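A tiny sketch of the de-duplication helpers above; the issue/message shape mirrors what getUniqueIssues destructures, the module path is an assumption, and the values are made up:

// Assuming the deleted file above is available as ./issues next to this sketch.
const { getUniqueIssues } = require("./issues");

const issues = [{
  sourceList: ["contracts/Example.sol"],
  messages: [
    { fatal: false, severity: 2, message: "Integer overflow" },
    { fatal: false, severity: 2, message: "Integer overflow" },  // duplicate, dropped
    { fatal: false, severity: 1, message: "Unused variable" }
  ]
}];

// => one issue with two unique messages, errorCount: 1, warningCount: 1
console.log(JSON.stringify(getUniqueIssues(issues), null, 2));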
@ -7,11 +7,6 @@ const SourceMappingDecoder = require(
|
|||
const srcmap = require('./srcmap');
|
||||
const mythx = require('./mythXUtil');
|
||||
|
||||
/*
|
||||
Mythril seems to downplay severity. What eslint calls an "error",
|
||||
Mythril calls "warning". And what eslint calls "warning",
|
||||
Mythril calls "informational".
|
||||
*/
|
||||
const mythx2Severity = {
|
||||
High: 2,
|
||||
Medium: 1,
|
||||
|
@@ -19,6 +14,39 @@ const mythx2Severity = {

const isFatal = (fatal, severity) => fatal || severity === 2;

const getUniqueMessages = messages => {
    const jsonValues = messages.map(m => JSON.stringify(m));
    const uniuqeValues = jsonValues.reduce((accum, curr) => {
        if (accum.indexOf(curr) === -1) {
            accum.push(curr);
        }
        return accum;
    }, []);

    return uniuqeValues.map(v => JSON.parse(v));
};

const calculateErrors = messages =>
    messages.reduce((acc, { fatal, severity }) => isFatal(fatal, severity) ? acc + 1 : acc, 0);

const calculateWarnings = messages =>
    messages.reduce((acc, { fatal, severity }) => !isFatal(fatal, severity) ? acc + 1 : acc, 0);

const getUniqueIssues = issues =>
    issues.map(({ messages, ...restProps }) => {
        const uniqueMessages = getUniqueMessages(messages);
        const warningCount = calculateWarnings(uniqueMessages);
        const errorCount = calculateErrors(uniqueMessages);

        return {
            ...restProps,
            messages: uniqueMessages,
            errorCount,
            warningCount,
        };
    });

const keepIssueInResults = function (issue, config) {

    // omit this issue if its severity is below the config threshold
@@ -38,32 +66,24 @@ const keepIssueInResults = function (issue, config) {

class MythXIssues {
    /**
     *
     * @param {object} buildObj - Truffle smart contract build object
     */
    constructor(buildObj, config) {
        this.issues = [];
        this.logs = [];
        console.log("mythx", JSON.stringify(mythx))
        //console.log("mythx", JSON.stringify(mythx))
        this.buildObj = mythx.truffle2MythXJSON(buildObj);
        //this.buildObj = buildObj;
        this.debug = config.debug;
        this.logger = config.logger;
        console.log("this.buildObj.deployedBytecode", JSON.stringify(this.buildObj.deployedBytecode))
        this.sourceMap = this.buildObj.sourceMap;
        this.sourcePath = buildObj.sourcePath;
        this.deployedSourceMap = this.buildObj.deployedSourceMap;
        this.offset2InstNum = srcmap.makeOffset2InstNum(this.buildObj.deployedBytecode);
        this.contractName = buildObj.contractName;
        this.sourceMappingDecoder = new SourceMappingDecoder();
        console.log("buildObj", buildObj)
        //console.log("buildObj", buildObj)
        this.asts = this.mapAsts(this.buildObj.sources);
        this.lineBreakPositions = this.mapLineBreakPositions(this.sourceMappingDecoder, this.buildObj.sources);
    }

    /**
     * Accepts analyze result issues and groups issues by sourceList
     *
     * @param {object[]} issues - MythX analyze API output result issues
     */
    setIssues(issueGroups) {
        for (let issueGroup of issueGroups) {
            if (issueGroup.sourceType === 'solidity-file' &&
@@ -82,19 +102,11 @@ class MythXIssues {
        const remappedIssues = issueGroups.map(mythx.remapMythXOutput);
        this.issues = remappedIssues
            .reduce((acc, curr) => acc.concat(curr), []);

        issueGroups.forEach(issueGroup => {
            this.logs = this.logs.concat((issueGroup.meta && issueGroup.meta.logs) || []);
        });
    }

    /**
     * Maps linebreak positions of a source to its solidity file from the array of sources
     *
     * @param {object} decoder - SourceMappingDecoder object
     * @param {object[]} sources - Collection of MythX API output sources property.
     * @returns {object} - linebreak positions grouped by solidity file paths
     */
    mapLineBreakPositions(decoder, sources) {
        const result = {};
@@ -107,22 +119,17 @@ class MythXIssues {
        return result;
    }

    /**
     * Maps ast objects to its solidity file from the array of sources
     *
     * @param {object[]} sources - Collection of MythX API output sources property.
     * @returns {object} - ast objects grouped by solidity file paths
     */
    mapAsts (sources) {
        const result = {};
        //console.log("sources", JSON.stringify(sources))
        Object.entries(sources).forEach(([ sourcePath, { ast } ]) => {
            result[sourcePath] = ast;
        });

        //console.log("mapAsts output: ", JSON.stringify(result))
        return result;
    }

    // Is this an issue that should be ignored?
    isIgnorable(sourceMapLocation) {
        const basename = path.basename(this.sourcePath);
        if (!( basename in this.asts)) {
@@ -143,14 +150,6 @@ class MythXIssues {
        }
    }

    /**
     * Turn a bytecode offset into a line and column location.
     * We make use of this.sourceMappingDecoder of this class to make
     * the conversion.
     *
     * @param {integer} bytecodeOffset - the offset we want to convert
     * @returns {line: number, column: number}
     */
    byteOffset2lineColumn(bytecodeOffset, lineBreakPositions) {
        const instNum = this.offset2InstNum[bytecodeOffset];
        const sourceLocation = this.sourceMappingDecoder.atIndex(instNum, this.deployedSourceMap);
@@ -158,35 +157,19 @@ class MythXIssues {
        const loc = this.sourceMappingDecoder
            .convertOffsetToLineColumn(sourceLocation, lineBreakPositions);

        // FIXME: note we are lossy in that we don't return the end location
        if (loc.start) {
            // Adjust because these routines start lines at 0 rather than 1.
            loc.start.line++;
        }
        if (loc.end) {
            loc.end.line++;
        }

        // FIXME: Note from discussion with Rocky we agreed
        // that byteOffset2LineColumn should always return
        // data even when line/column can't be found.
        // Default is { start: {line: -1, column: 0}, end: {}}
        const start = loc.start || { line: -1, column: 0 };
        const end = loc.end || {};

        return [start, end];
    }

    /**
     * Turn a srcmap entry (the thing between semicolons) into a line and
     * column location.
     * We make use of this.sourceMappingDecoder of this class to make
     * the conversion.
     *
     * @param {string} srcEntry - a single entry of solc sourceMap
     * @returns {line: number, column: number}
     */
    textSrcEntry2lineColumn(srcEntry, lineBreakPositions) {
        const ary = srcEntry.split(':');
        const sourceLocation = {
@@ -195,9 +178,7 @@ class MythXIssues {
        };
        const loc = this.sourceMappingDecoder
            .convertOffsetToLineColumn(sourceLocation, lineBreakPositions);
        // FIXME: note we are lossy in that we don't return the end location
        if (loc.start) {
            // Adjust because these routines start lines at 0 rather than 1.
            loc.start.line++;
        }
        if (loc.end) {
@@ -206,35 +187,6 @@ class MythXIssues {
        return [loc.start, loc.end];
    }

    /**
     * Convert a MythX issue into an ESLint-style issue.
     * The eslint report format which we use has these fields:
     *
     * - column,
     * - endCol,
     * - endLine,
     * - fatal,
     * - line,
     * - message,
     * - ruleId,
     * - severity
     *
     * but a MythX JSON report has these fields:
     *
     * - description.head
     * - description.tail,
     * - locations
     * - severity
     * - swcId
     * - swcTitle
     *
     * @param {MythXIssue} issue - the MythX issue we want to convert
     * @param {boolean} spaceLimited - true if we have a space-limited report format
     * @param {string} sourceFormat - the kind of location we have, e.g. evm-bytecode or source text
     * @param {Array<string>} sourceList - a list of container objects (e.g. bytecode, source code) that
     *                                     holds the locations that are referred to
     * @returns eslint-issue object
     */
    issue2EsLint(issue, spaceLimited, sourceFormat, sourceName) {
        const esIssue = {
            fatal: false,
@@ -270,12 +222,6 @@ class MythXIssues {
        return esIssue;
    }

    /**
     * Converts MythX analyze API output item to Eslint compatible object
     * @param {object} report - issue item from the collection MythX analyze API output
     * @param {boolean} spaceLimited
     * @returns {object}
     */
    convertMythXReport2EsIssue(report, config, spaceLimited) {
        const { issues, sourceFormat, source } = report;
        const result = {
@@ -298,18 +244,173 @@ class MythXIssues {

        return result;
    }
    /**
     * Transforms array of MythX Issues into Eslint issues
     *
     * @param {boolean} spaceLimited
     * @returns {object[]}
     */

    getEslintIssues(config, spaceLimited = false) {
        return this.issues.map(report => this.convertMythXReport2EsIssue(report, config, spaceLimited));
    }
}

function doReport(config, objects, errors, notAnalyzedContracts) {
    let ret = 0;

    // Return true if we should show log.
    // Ignore logs with log.level "info" unless the "debug" flag
    // has been set.
    function showLog(log) {
        return config.debug || (log.level !== 'info');
    }

    // Return 1 if some vulnerabilities were found.
    objects.forEach(ele => {
        ele.issues.forEach(ele => {
            ret = ele.issues.length > 0 ? 1 : ret;
        })
    })

    if (config.yaml) {
        config.logger.info("config.yaml", config.yaml)
        const yamlDumpObjects = objects;
        for(let i = 0; i < yamlDumpObjects.length; i++) {
            delete yamlDumpObjects[i].logger;
        }
        config.logger.info(yaml.safeDump(yamlDumpObjects, {'skipInvalid': true}));
    } else if (config.json) {
        config.logger.info("config.json", config.json)
        config.logger.info(JSON.stringify(objects, null, 4));
    } else {
        const spaceLimited = ['tap', 'markdown', 'json'].indexOf(config.style) === -1;
        const eslintIssues = objects
            .map(obj => obj.getEslintIssues(config, spaceLimited))
            .reduce((acc, curr) => acc.concat(curr), []);

        // FIXME: temporary solution until backend will return correct filepath and output.
        const eslintIssuesByBaseName = groupEslintIssuesByBasename(eslintIssues);

        const uniqueIssues = getUniqueIssues(eslintIssuesByBaseName);

        console.log("uniqueIssues", JSON.stringify(uniqueIssues))

        console.log("config.style", config.style)
        const formatter = getFormatter(config.style);

        config.logger.info("config.logger", JSON.stringify(config.logger))
        const report = formatter(uniqueIssues);
        console.log("report", report)
        config.logger.info(report);
    }

    const logGroups = objects.map(obj => { return {'sourcePath': obj.sourcePath, 'logs': obj.logs, 'uuid': obj.uuid};})
        .reduce((acc, curr) => acc.concat(curr), []);

    let haveLogs = false;
    logGroups.some(logGroup => {
        logGroup.logs.some(log => {
            if (showLog(log)) {
                haveLogs = true;
                return;
            }
        });
        if(haveLogs) return;
    });

    if (haveLogs) {
        ret = 1;
        config.logger.info('MythX Logs:'.yellow);
        logGroups.forEach(logGroup => {
            config.logger.info(`\n${logGroup.sourcePath}`.yellow);
            config.logger.info(`UUID: ${logGroup.uuid}`.yellow);
            logGroup.logs.forEach(log => {
                if (showLog(log)) {
                    config.logger.info(`${log.level}: ${log.msg}`);
                }
            });
        });
    }

    if (errors.length > 0) {
        ret = 1;
        console.error('Internal MythX errors encountered:'.red);
        errors.forEach(err => {
            console.error(err.error || err);
            if (config.debug > 1 && err.stack) {
                config.logger.info(err.stack);
            }
        });
    }

    return ret;
}

function getFormatter(style) {
    const formatterName = style || 'stylish';
    try {
        const frmtr = require(`eslint/lib/formatters/${formatterName}`);
        return frmtr
    } catch (ex) {
        ex.message = `\nThere was a problem loading formatter option: ${style} \nError: ${
            ex.message
        }`;
        throw ex;
    }
}

const groupEslintIssuesByBasename = issues => {
    const path = require('path');
    const mappedIssues = issues.reduce((accum, issue) => {
        const {
            errorCount,
            warningCount,
            fixableErrorCount,
            fixableWarningCount,
            filePath,
            messages,
        } = issue;

        const basename = path.basename(filePath);
        if (!accum[basename]) {
            accum[basename] = {
                errorCount: 0,
                warningCount: 0,
                fixableErrorCount: 0,
                fixableWarningCount: 0,
                filePath: filePath,
                messages: [],
            };
        }
        accum[basename].errorCount += errorCount;
        accum[basename].warningCount += warningCount;
        accum[basename].fixableErrorCount += fixableErrorCount;
        accum[basename].fixableWarningCount += fixableWarningCount;
        accum[basename].messages = accum[basename].messages.concat(messages);
        return accum;
    }, {});

    const issueGroups = Object.values(mappedIssues);
    for (const group of issueGroups) {
        group.messages = group.messages.sort(function(mess1, mess2) {
            return compareMessLCRange(mess1, mess2);
        });
    }
    return issueGroups;
};

function compareMessLCRange(mess1, mess2) {
    const c = compareLineCol(mess1.line, mess1.column, mess2.line, mess2.column);
    return c != 0 ? c : compareLineCol(mess1.endLine, mess1.endCol, mess2.endLine, mess2.endCol);
}

function compareLineCol(line1, column1, line2, column2) {
    return line1 === line2 ?
        (column1 - column2) :
        (line1 - line2);
}

module.exports = {
    MythXIssues,
    keepIssueInResults
    keepIssueInResults,
    getUniqueIssues,
    getUniqueMessages,
    isFatal,
    doReport
};
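To make the reporting path above concrete: getEslintIssues and groupEslintIssuesByBasename produce ESLint-style result objects, and doReport hands an array of them to the formatter returned by getFormatter(config.style). A hedged sketch of one such result object, with the field names taken from the code above and the values invented for illustration:

// Hypothetical grouped result, shaped like the output of groupEslintIssuesByBasename.
const exampleResult = {
    filePath: 'Token.sol',          // invented
    errorCount: 1,
    warningCount: 1,
    fixableErrorCount: 0,
    fixableWarningCount: 0,
    messages: [
        { ruleId: 'SWC-101', message: 'Arithmetic operation may wrap (invented text)',
          severity: 2, fatal: false, line: 27, column: 8, endLine: 27, endCol: 42 },
        { ruleId: 'SWC-103', message: 'A floating pragma is set (invented text)',
          severity: 1, fatal: false, line: 1, column: 0, endLine: 1, endCol: 23 }
    ]
};

// An ESLint formatter (such as the bundled "stylish" one loaded by getFormatter)
// accepts an array of such objects and returns the printable report string.
console.log(JSON.stringify([exampleResult], null, 2));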
lib/mythXUtil.js (220 lines changed)
@@ -3,7 +3,6 @@
const armlet = require('armlet')
const fs = require('fs')
const util = require('util');
const eslintHelpers = require('./eslint');
const srcmap = require('./srcmap');

const getContractFiles = directory => {

@@ -34,7 +33,7 @@ const getNotFoundContracts = (allContractNames, foundContracts) => {

const buildRequestData = contractObjects => {

    console.log("contractObjects", JSON.stringify(contractObjects))
    //console.log("contractObjects", JSON.stringify(contractObjects))

    let allContracts = [];

@@ -44,7 +43,7 @@ const buildRequestData = contractObjects => {
        // Read source code from file
        const source = fs.readFileSync(fileKey, 'utf8')

        console.log("source", source)
        //console.log("source", source)

        Object.keys(contractFile).forEach(function(contractKey, index) {
@@ -62,58 +61,15 @@ const buildRequestData = contractObjects => {
                source: source
            };

            console.log("comes out", contract)
            //console.log("comes out", contract)
            allContracts = allContracts.concat(contract);
        });
    });

    /*
    // Remove possible duplicates
    const uniqueContracts = {};
    allContracts.forEach(contract => {
        console.log("contract.contractName", contract.contractName)
        console.log("contract", JSON.stringify(contract))
        if (!uniqueContracts[contract.contractName]) {
            uniqueContracts[contract.contractName] = contract
        }
    })

    console.log("allContracts without duplicates", JSON.stringify(uniqueContracts))
    */

    /*
    let requestData = [];

    for(const contractName in uniqueContracts) {

        const contract = uniqueContracts[contractName]
        console.log(contract.className, contract)

        const source = fs.readFileSync(contract.filename, 'utf8')

        console.log("source", source)

        const newContract = {
            contractName: contract.className,
            bytecode: contract.runtimeBytecode,
            deployedBytecode: contract.realRuntimeBytecode,
            //sources: {}
        }
        //newContract.sources[contract.filename] = { "source": source }

        console.log("comes out", newContract)
        requestData = requestData.concat(newContract);
    }
    */

    return allContracts;
};

// Take truffle's build/contracts/xxx.json JSON and make it
// compatible with the Mythril Platform API
const truffle2MythXJSON = function(truffleJSON, toolId = 'truffle-security') {
const truffle2MythXJSON = function(truffleJSON, toolId = 'embark-mythx') {
    let {
        contractName,
        bytecode,
@@ -128,9 +84,6 @@ const truffle2MythXJSON = function(truffleJSON, toolId = 'truffle-security') {

    const sourcesKey = path.basename(sourcePath);

    // FIXME: why do we only have one sourcePath in sourceList?
    // We shouldn't be zeroing this but instead correcting sourceList to
    // have the multiple entries.
    sourceMap = srcmap.zeroedSourceMap(sourceMap);
    deployedSourceMap = srcmap.zeroedSourceMap(deployedSourceMap);

@@ -140,7 +93,7 @@ const truffle2MythXJSON = function(truffleJSON, toolId = 'truffle-security') {
        deployedBytecode,
        sourceMap,
        deployedSourceMap,
        sourceList: [ sourcePath ],
        sourceList: [ sourcesKey ],
        sources: {
            [sourcesKey]: {
                source,
@@ -148,10 +101,11 @@ const truffle2MythXJSON = function(truffleJSON, toolId = 'truffle-security') {
                legacyAST,
            },
        },
        mainSource: sourcesKey,
        toolId
    }

    console.log("mythXJSON", mythXJSON)
    //console.log("mythXJSON", JSON.stringify(mythXJSON))
    return mythXJSON;
};

@@ -225,163 +179,6 @@ const cleanAnalyzeDataEmptyProps = (data, debug, logger) => {
    return result;
}

function doReport(config, objects, errors, notAnalyzedContracts) {
    let ret = 0;

    // Return true if we should show log.
    // Ignore logs with log.level "info" unless the "debug" flag
    // has been set.
    function showLog(log) {
        return config.debug || (log.level !== 'info');
    }

    // Return 1 if some vulnerabilities were found.
    objects.forEach(ele => {
        ele.issues.forEach(ele => {
            ret = ele.issues.length > 0 ? 1 : ret;
        })
    })

    if (config.yaml) {
        config.logger.info("config.yaml", config.yaml)
        const yamlDumpObjects = objects;
        for(let i = 0; i < yamlDumpObjects.length; i++) {
            delete yamlDumpObjects[i].logger;
        }
        config.logger.info(yaml.safeDump(yamlDumpObjects, {'skipInvalid': true}));
    } else if (config.json) {
        config.logger.info("config.json", config.json)
        config.logger.info(JSON.stringify(objects, null, 4));
    } else {
        config.logger.info("else", "else")
        const spaceLimited = ['tap', 'markdown', 'json'].indexOf(config.style) === -1;
        const eslintIssues = objects
            .map(obj => obj.getEslintIssues(config, spaceLimited))
            .reduce((acc, curr) => acc.concat(curr), []);

        // FIXME: temporary solution until backend will return correct filepath and output.
        const eslintIssuesByBaseName = groupEslintIssuesByBasename(eslintIssues);

        const uniqueIssues = eslintHelpers.getUniqueIssues(eslintIssuesByBaseName);

        console.log("uniqueIssues", JSON.stringify(uniqueIssues))

        console.log("config.style", config.style)
        const formatter = getFormatter(config.style);

        config.logger.info("config.logger", JSON.stringify(config.logger))
        const report = formatter(uniqueIssues);
        console.log("report", report)
        config.logger.info(report);
    }

    const logGroups = objects.map(obj => { return {'sourcePath': obj.sourcePath, 'logs': obj.logs, 'uuid': obj.uuid};})
        .reduce((acc, curr) => acc.concat(curr), []);

    let haveLogs = false;
    logGroups.some(logGroup => {
        logGroup.logs.some(log => {
            if (showLog(log)) {
                haveLogs = true;
                return;
            }
        });
        if(haveLogs) return;
    });

    if (haveLogs) {
        ret = 1;
        config.logger.info('MythX Logs:'.yellow);
        logGroups.forEach(logGroup => {
            config.logger.info(`\n${logGroup.sourcePath}`.yellow);
            config.logger.info(`UUID: ${logGroup.uuid}`.yellow);
            logGroup.logs.forEach(log => {
                if (showLog(log)) {
                    config.logger.info(`${log.level}: ${log.msg}`);
                }
            });
        });
    }

    if (errors.length > 0) {
        ret = 1;
        console.error('Internal MythX errors encountered:'.red);
        errors.forEach(err => {
            console.error(err.error || err);
            if (config.debug > 1 && err.stack) {
                config.logger.info(err.stack);
            }
        });
    }

    return ret;
}

function getFormatter(style) {
    const formatterName = style || 'stylish';
    try {
        const frmtr = require(`eslint/lib/formatters/${formatterName}`);
        return frmtr
    } catch (ex) {
        ex.message = `\nThere was a problem loading formatter option: ${style} \nError: ${
            ex.message
        }`;
        throw ex;
    }
}

const groupEslintIssuesByBasename = issues => {
    const path = require('path');
    const mappedIssues = issues.reduce((accum, issue) => {
        const {
            errorCount,
            warningCount,
            fixableErrorCount,
            fixableWarningCount,
            filePath,
            messages,
        } = issue;

        const basename = path.basename(filePath);
        if (!accum[basename]) {
            accum[basename] = {
                errorCount: 0,
                warningCount: 0,
                fixableErrorCount: 0,
                fixableWarningCount: 0,
                filePath: filePath,
                messages: [],
            };
        }
        accum[basename].errorCount += errorCount;
        accum[basename].warningCount += warningCount;
        accum[basename].fixableErrorCount += fixableErrorCount;
        accum[basename].fixableWarningCount += fixableWarningCount;
        accum[basename].messages = accum[basename].messages.concat(messages);
        return accum;
    }, {});

    const issueGroups = Object.values(mappedIssues);
    for (const group of issueGroups) {
        group.messages = group.messages.sort(function(mess1, mess2) {
            return compareMessLCRange(mess1, mess2);
        });
    }
    return issueGroups;
};

function compareMessLCRange(mess1, mess2) {
    const c = compareLineCol(mess1.line, mess1.column, mess2.line, mess2.column);
    return c != 0 ? c : compareLineCol(mess1.endLine, mess1.endCol, mess2.endLine, mess2.endCol);
}

function compareLineCol(line1, column1, line2, column2) {
    return line1 === line2 ?
        (column1 - column2) :
        (line1 - line2);
}

module.exports = {
    remapMythXOutput,
    truffle2MythXJSON,
@@ -389,6 +186,5 @@ module.exports = {
    getNotFoundContracts,
    getFoundContractNames,
    getContractFiles,
    cleanAnalyzeDataEmptyProps,
    doReport
    cleanAnalyzeDataEmptyProps
}

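For orientation, this is roughly the request object that truffle2MythXJSON assembles for armlet, pieced together from the fields visible in the hunks above; the concrete values are placeholders, and the bytecode, source maps and ASTs are elided:

// Illustrative only: field names follow the mythXJSON object built above.
const exampleMythXPayload = {
    contractName: 'Token',                    // invented
    bytecode: '0x...',                        // creation bytecode (elided)
    deployedBytecode: '0x...',                // runtime bytecode (elided)
    sourceMap: '...',                         // run through srcmap.zeroedSourceMap
    deployedSourceMap: '...',
    sourceList: ['Token.sol'],                // basename of sourcePath (sourcesKey)
    sources: {
        'Token.sol': {
            source: 'pragma solidity ^0.5.0; contract Token {}'   // invented source
            // ast and legacyAST from the compiler output go here as well
        }
    },
    mainSource: 'Token.sol',
    toolId: 'embark-mythx'
};

console.log(Object.keys(exampleMythXPayload));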
@@ -1,6 +1,3 @@
// Things involving the richer solc source map with its AST.
// We use this to filter out some MythX error messages.
//
'use strict';

const assert = require('assert');
@@ -9,15 +6,6 @@ const SourceMappingDecoder = require('../compat/remix-lib/sourceMappingDecoder.j
const opcodes = require('remix-lib/src/code/opcodes');

module.exports = {
    /**
     * Return the VariableDeclaration AST node associated with instIndex
     * if there is one. Otherwise return null.
     * @param {sourceLocation} string - solc srcmap used to associate the instruction
     * with an ast node
     * @param {ast} - solc root AST for contract
     * @return {AST node or null}
     *
     */
    isVariableDeclaration: function (srcmap, ast) {
        const sourceMappingDecoder = new SourceMappingDecoder();
        const sourceLocation = sourceMappingDecoder.decode(srcmap);
@@ -25,29 +13,12 @@ module.exports = {
            sourceLocation, ast);
    },

    /**
     * Return true if the AST node is a public array.
     * @param {node} AST node - bytecode offset of instruction
     * @return {boolean}
     *
     */
    isDynamicArray: function (node) {
        // FIXME: do we want to check:
        // constant: false
        // storageLocation: 'default'
        return (node.stateVariable &&
            node.visibility === 'public' &&
            node.typeName.nodeType === 'ArrayTypeName');
    },

    /**
     * Takes a bytecode hexstring and returns a map indexed by offset
     * that gives the instruction number for that offset.
     *
     * @param {hexstr} string - bytecode hexstring
     * @return {array mapping bytecode offset to an instruction number}
     *
     */
    makeOffset2InstNum: function(hexstr) {
        const bytecode = remixUtil.hexToIntArray(hexstr);
        const instMap = {};
@@ -64,11 +35,6 @@ module.exports = {
        return instMap;
    },

    // FIXME: seenIndices is not being used. Remove it?
    /**
     * @param {String} sourceMap - solc-type sourceMap
     * @return {Set} a set containing the "file" indices seen in a sourceMap
     */
    seenIndices: function(sourceMap) {
        const seen = new Set();
        const srcArray = sourceMap.split(';');
@@ -88,12 +54,6 @@ module.exports = {
        return seen;
    },

    // FIXME: this is just a stopgap measure.
    // The caller in mythx should be fixed so we don't need this.
    /**
     * @param {String} sourceMap - solc-type sourceMap
     * @return take sourceMap entries and turn them into file index 0
     */
    zeroedSourceMap: function(sourceMap) {
        const srcArray = sourceMap.split(';');
        let modArray = [];
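The zeroedSourceMap doc comment above says it rewrites every source-map entry to file index 0. A sketch of that behaviour under the usual solc source-map encoding (entries are ';'-separated, fields within an entry are ':'-separated, and the third field is the file index); this is an assumed reimplementation for illustration, not the body of the function:

// Assumed behaviour: force the "file" field of each solc source-map entry to 0.
const zeroedSourceMapSketch = sourceMap =>
    sourceMap.split(';').map(entry => {
        if (entry === '') return entry;            // empty entries repeat the previous one
        const fields = entry.split(':');
        if (fields.length > 2 && fields[2] !== '') fields[2] = '0';
        return fields.join(':');
    }).join(';');

console.log(zeroedSourceMapSketch('0:42:1:-;10:5:1:o'));   // "0:42:0:-;10:5:0:o"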
mythx.js (59 lines changed)
@@ -4,13 +4,17 @@ const armlet = require('armlet')
const fs = require('fs')
const mythXUtil = require('./lib/mythXUtil');
const asyncPool = require('tiny-async-pool');
const { MythXIssues } = require('./lib/issues2eslint');
const { MythXIssues, doReport } = require('./lib/issues2eslint');

const defaultAnalyzeRateLimit = 4

module.exports = async function analyse(contracts, cfg, embark) {

    //embark.logger.debug("embark.config", embark.config)

    //console.log("embark.logger", embark.logger)
    //console.log("JSON.stringify(embark.logger)", JSON.stringify(embark.logger))
    //embark.logger.info("typeof embark.logger", typeof embark.logger)
    cfg.logger = embark.logger
    //embark.logger.info("embark", JSON.stringify(embark))

@@ -18,11 +22,11 @@ module.exports = async function analyse(contracts, cfg, embark) {
    const limit = cfg.limit || defaultAnalyzeRateLimit

    if (isNaN(limit)) {
        console.log(`limit parameter should be a number; got ${limit}.`)
        embark.logger.info(`limit parameter should be a number; got ${limit}.`)
        return 1
    }
    if (limit < 0 || limit > defaultAnalyzeRateLimit) {
        console.log(`limit should be between 0 and ${defaultAnalyzeRateLimit}; got ${limit}.`)
        embark.logger.info(`limit should be between 0 and ${defaultAnalyzeRateLimit}; got ${limit}.`)
        return 1
    }

@@ -38,49 +42,29 @@ module.exports = async function analyse(contracts, cfg, embark) {
    //TODO: Check contract names provided in options are respected
    //const contractNames = cfg._.length > 1 ? cfg._.slice(1, cfg._.length) : null

    // Collect contracts ---

    // Extract list of contracts passed in cli to verify

    /*
    // Get list of JSON smart contract files from build directory
    console.log("embark.config.embarkConfig.generationDir", embark.config.buildDir)
    const contractFiles = mythXUtil.getContractFiles(embark.config.buildDir + "/contracts")

    embark.logger.debug("contractFiles", contractFiles)

    // Parse contracts

    let contractObjects = contractFiles.map(filename => {
        const jsonFile = fs.readFileSync(filename, 'utf8')
        //console.log("contract object", jsonFile)
        return JSON.parse(jsonFile)
    })
    */

    console.log("contracts", contracts)

    //TODO: Possibly need to rewrite the contract objects for MythX to understand
    //embark.logger.info("contracts", contracts)

    const submitObjects = mythXUtil.buildRequestData(contracts)

    const { objects, errors } = await doAnalysis(armletClient, cfg, submitObjects, limit)
    process.exit(0)
    const { objects, errors } = await doAnalysis(armletClient, cfg, submitObjects, null, limit)

    //console.log("objects", JSON.stringify(objects))
    console.log("errors", errors)
    //embark.logger.info("errors", errors)

    const result = mythXUtil.doReport(cfg, objects, errors)
    console.log("result", result)
    const result = doReport(cfg, objects, errors)
    //embark.logger.info("result", result)
    return result
}

const doAnalysis = async (armletClient, config, contracts, contractNames = null, limit) => {

    console.log("\ncontracts", contracts)
    //config.logger.info("\ncontracts", contracts)

    const timeout = (config.timeout || 300) * 1000;
    const initialDelay = ('initial-delay' in config) ? config['initial-delay'] * 1000 : undefined;
    const cacheLookup = ('cache-lookup' in config) ? config['cache-lookup'] : true;
    //const cacheLookup = ('cache-lookup' in config) ? config['cache-lookup'] : true;
    const cacheLookup = false

    const results = await asyncPool(limit, contracts, async buildObj => {

@@ -101,8 +85,12 @@ const doAnalysis = async (armletClient, config, contracts, contractNames = null,

        // request analysis to armlet.
        try {
            console.log("analyzeOpts", JSON.stringify(analyzeOpts))
            const {issues, status} = await armletClient.analyzeWithStatus(analyzeOpts);
            //config.logger.info("analyzeOpts", JSON.stringify(analyzeOpts))
            const armletResult = await armletClient.analyzeWithStatus(analyzeOpts);
            //config.logger.info("armletResult", JSON.stringify(armletResult))
            const {issues, status} = armletResult
            //config.logger.info("issues", issues)
            //config.logger.info("status", status)
            obj.uuid = status.uuid;
            if (config.debug) {
                config.logger.debug(`${analyzeOpts.data.contractName}: UUID is ${status.uuid}`);
@@ -138,10 +126,13 @@ const doAnalysis = async (armletClient, config, contracts, contractNames = null,
                return [(buildObj.contractName + ": ").yellow + errStr, null];
            } else {
                return [(buildObj.contractName + ": ").red + errStr, null];

            }
        }
    });

    //console.log("results", JSON.stringify(results))

    return results.reduce((accum, curr) => {
        const [ err, obj ] = curr;
        if (err) {